Optona is a framework designed for automation and acceleration of optimization studies.¶
In [2]:
# Import necessary libraries
import optuna
from sklearn.datasets import load_diabetes
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import warnings
warnings.filterwarnings('ignore')
# Load the Pima Indian Diabetes dataset from sklearn
# Note: Scikit-learn's built-in 'load_diabetes' is a regression dataset.
# We will load the actual diabetes dataset from an external source
import pandas as pd
# Load the Pima Indian Diabetes dataset (from UCI repository)
url = "https://raw.githubusercontent.com/jbrownlee/Datasets/master/pima-indians-diabetes.data.csv"
columns = ['Pregnancies', 'Glucose', 'BloodPressure', 'SkinThickness', 'Insulin', 'BMI',
'DiabetesPedigreeFunction', 'Age', 'Outcome']
# Load the dataset
df = pd.read_csv(url, names=columns)
df.head()
Out[2]:
| Pregnancies | Glucose | BloodPressure | SkinThickness | Insulin | BMI | DiabetesPedigreeFunction | Age | Outcome | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 6 | 148 | 72 | 35 | 0 | 33.6 | 0.627 | 50 | 1 |
| 1 | 1 | 85 | 66 | 29 | 0 | 26.6 | 0.351 | 31 | 0 |
| 2 | 8 | 183 | 64 | 0 | 0 | 23.3 | 0.672 | 32 | 1 |
| 3 | 1 | 89 | 66 | 23 | 94 | 28.1 | 0.167 | 21 | 0 |
| 4 | 0 | 137 | 40 | 35 | 168 | 43.1 | 2.288 | 33 | 1 |
In [3]:
import numpy as np
# Replace zero values with NaN in columns where zero is not a valid value
cols_with_missing_vals = ['Glucose', 'BloodPressure', 'SkinThickness', 'Insulin', 'BMI']
df[cols_with_missing_vals] = df[cols_with_missing_vals].replace(0, np.nan)
# Impute the missing values with the mean of the respective column
df.fillna(df.mean(), inplace=True)
# Check if there are any remaining missing values
print(df.isnull().sum())
Pregnancies 0 Glucose 0 BloodPressure 0 SkinThickness 0 Insulin 0 BMI 0 DiabetesPedigreeFunction 0 Age 0 Outcome 0 dtype: int64
In [4]:
# Split into features (X) and target (y)
X = df.drop('Outcome', axis=1)
y = df['Outcome']
# Split data into training and test sets (70% train, 30% test)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
# Optional: Scale the data for better model performance
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)
# Check the shape of the data
print(f'Training set shape: {X_train.shape}')
print(f'Test set shape: {X_test.shape}')
Training set shape: (537, 8) Test set shape: (231, 8)
In [5]:
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
# Define the objective function
def objective(trial):
# Suggest values for the hyperparameters
n_estimators = trial.suggest_int('n_estimators', 50, 200)
max_depth = trial.suggest_int('max_depth', 3, 20)
# Create the RandomForestClassifier with suggested hyperparameters
model = RandomForestClassifier(
n_estimators=n_estimators,
max_depth=max_depth,
random_state=42
)
# Perform 3-fold cross-validation and calculate accuracy
score = cross_val_score(model, X_train, y_train, cv=3, scoring='accuracy').mean()
return score # Return the accuracy score for Optuna to maximize
In [6]:
# Create a study object and optimize the objective function
study = optuna.create_study(direction='maximize', sampler=optuna.samplers.TPESampler()) # We aim to maximize accuracy
study.optimize(objective, n_trials=50) # Run 50 trials to find the best hyperparameters
[I 2024-09-27 22:28:28,141] A new study created in memory with name: no-name-c21f6cb2-a215-40d7-a548-adb900b49106
[I 2024-09-27 22:28:29,763] Trial 0 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 195, 'max_depth': 14}. Best is trial 0 with value: 0.7709497206703911.
[I 2024-09-27 22:28:30,600] Trial 1 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 102, 'max_depth': 11}. Best is trial 0 with value: 0.7709497206703911.
[I 2024-09-27 22:28:31,818] Trial 2 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 147, 'max_depth': 6}. Best is trial 0 with value: 0.7709497206703911.
[I 2024-09-27 22:28:32,595] Trial 3 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 93, 'max_depth': 13}. Best is trial 0 with value: 0.7709497206703911.
[I 2024-09-27 22:28:33,946] Trial 4 finished with value: 0.7765363128491619 and parameters: {'n_estimators': 130, 'max_depth': 16}. Best is trial 4 with value: 0.7765363128491619.
[I 2024-09-27 22:28:35,197] Trial 5 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 148, 'max_depth': 19}. Best is trial 4 with value: 0.7765363128491619.
[I 2024-09-27 22:28:35,917] Trial 6 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 86, 'max_depth': 8}. Best is trial 4 with value: 0.7765363128491619.
[I 2024-09-27 22:28:36,672] Trial 7 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 92, 'max_depth': 15}. Best is trial 4 with value: 0.7765363128491619.
[I 2024-09-27 22:28:37,219] Trial 8 finished with value: 0.7765363128491621 and parameters: {'n_estimators': 61, 'max_depth': 17}. Best is trial 8 with value: 0.7765363128491621.
[I 2024-09-27 22:28:38,464] Trial 9 finished with value: 0.7597765363128491 and parameters: {'n_estimators': 167, 'max_depth': 4}. Best is trial 8 with value: 0.7765363128491621.
[I 2024-09-27 22:28:39,199] Trial 10 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 51, 'max_depth': 20}. Best is trial 8 with value: 0.7765363128491621.
[I 2024-09-27 22:28:39,674] Trial 11 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 51, 'max_depth': 17}. Best is trial 8 with value: 0.7765363128491621.
[I 2024-09-27 22:28:40,637] Trial 12 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 120, 'max_depth': 17}. Best is trial 8 with value: 0.7765363128491621.
[I 2024-09-27 22:28:41,722] Trial 13 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 125, 'max_depth': 10}. Best is trial 8 with value: 0.7765363128491621.
[I 2024-09-27 22:28:42,397] Trial 14 finished with value: 0.7783985102420857 and parameters: {'n_estimators': 75, 'max_depth': 17}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:43,044] Trial 15 finished with value: 0.7783985102420857 and parameters: {'n_estimators': 72, 'max_depth': 18}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:43,703] Trial 16 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 79, 'max_depth': 20}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:44,626] Trial 17 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 75, 'max_depth': 13}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:45,762] Trial 18 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 108, 'max_depth': 18}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:46,666] Trial 19 finished with value: 0.7616387337057727 and parameters: {'n_estimators': 72, 'max_depth': 9}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:47,351] Trial 20 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 65, 'max_depth': 15}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:47,891] Trial 21 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 62, 'max_depth': 18}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:48,438] Trial 22 finished with value: 0.7765363128491621 and parameters: {'n_estimators': 65, 'max_depth': 16}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:48,911] Trial 23 finished with value: 0.7746741154562384 and parameters: {'n_estimators': 54, 'max_depth': 18}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:49,851] Trial 24 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 108, 'max_depth': 13}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:50,980] Trial 25 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 81, 'max_depth': 20}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:51,888] Trial 26 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 97, 'max_depth': 16}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:52,506] Trial 27 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 66, 'max_depth': 14}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:53,441] Trial 28 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 82, 'max_depth': 19}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:55,159] Trial 29 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 189, 'max_depth': 14}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:56,509] Trial 30 finished with value: 0.7746741154562384 and parameters: {'n_estimators': 117, 'max_depth': 12}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:57,014] Trial 31 finished with value: 0.7765363128491621 and parameters: {'n_estimators': 61, 'max_depth': 16}. Best is trial 14 with value: 0.7783985102420857.
[I 2024-09-27 22:28:57,591] Trial 32 finished with value: 0.7802607076350094 and parameters: {'n_estimators': 70, 'max_depth': 17}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:28:58,157] Trial 33 finished with value: 0.7783985102420856 and parameters: {'n_estimators': 72, 'max_depth': 17}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:28:58,773] Trial 34 finished with value: 0.7783985102420857 and parameters: {'n_estimators': 71, 'max_depth': 19}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:28:59,966] Trial 35 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 88, 'max_depth': 19}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:01,205] Trial 36 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 105, 'max_depth': 19}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:03,004] Trial 37 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 142, 'max_depth': 15}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:04,102] Trial 38 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 99, 'max_depth': 18}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:04,934] Trial 39 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 88, 'max_depth': 11}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:05,670] Trial 40 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 76, 'max_depth': 6}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:06,351] Trial 41 finished with value: 0.7802607076350094 and parameters: {'n_estimators': 73, 'max_depth': 17}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:07,025] Trial 42 finished with value: 0.7783985102420857 and parameters: {'n_estimators': 70, 'max_depth': 19}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:07,889] Trial 43 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 59, 'max_depth': 17}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:08,727] Trial 44 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 94, 'max_depth': 16}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:09,267] Trial 45 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 57, 'max_depth': 20}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:10,020] Trial 46 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 84, 'max_depth': 18}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:10,745] Trial 47 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 69, 'max_depth': 15}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:12,405] Trial 48 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 173, 'max_depth': 17}. Best is trial 32 with value: 0.7802607076350094.
[I 2024-09-27 22:29:13,431] Trial 49 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 78, 'max_depth': 20}. Best is trial 32 with value: 0.7802607076350094.
In [7]:
# Print the best result
print(f'Best trial accuracy: {study.best_trial.value}')
print(f'Best hyperparameters: {study.best_trial.params}')
Best trial accuracy: 0.7802607076350094
Best hyperparameters: {'n_estimators': 70, 'max_depth': 17}
In [8]:
from sklearn.metrics import accuracy_score
# Train a RandomForestClassifier using the best hyperparameters from Optuna
best_model = RandomForestClassifier(**study.best_trial.params, random_state=42)
# Fit the model to the training data
best_model.fit(X_train, y_train)
# Make predictions on the test set
y_pred = best_model.predict(X_test)
# Calculate the accuracy on the test set
test_accuracy = accuracy_score(y_test, y_pred)
# Print the test accuracy
print(f'Test Accuracy with best hyperparameters: {test_accuracy:.2f}')
Test Accuracy with best hyperparameters: 0.75
Samplers in Optuna¶
In [9]:
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
# Define the objective function
def objective(trial):
# Suggest values for the hyperparameters
n_estimators = trial.suggest_int('n_estimators', 50, 200)
max_depth = trial.suggest_int('max_depth', 3, 20)
# Create the RandomForestClassifier with suggested hyperparameters
model = RandomForestClassifier(
n_estimators=n_estimators,
max_depth=max_depth,
random_state=42
)
# Perform 3-fold cross-validation and calculate accuracy
score = cross_val_score(model, X_train, y_train, cv=3, scoring='accuracy').mean()
return score # Return the accuracy score for Optuna to maximize
In [10]:
study = optuna.create_study(direction='maximize', sampler=optuna.samplers.RandomSampler()) # We aim to maximize accuracy
study.optimize(objective, n_trials=50) # Run 50 trials to find the best hyperparameters
[I 2024-09-27 22:29:51,290] A new study created in memory with name: no-name-a24f90f1-aae5-466d-a31f-861cdfa7cebf
[I 2024-09-27 22:29:52,288] Trial 0 finished with value: 0.7597765363128491 and parameters: {'n_estimators': 92, 'max_depth': 9}. Best is trial 0 with value: 0.7597765363128491.
[I 2024-09-27 22:29:53,275] Trial 1 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 99, 'max_depth': 16}. Best is trial 1 with value: 0.7672253258845437.
[I 2024-09-27 22:29:54,460] Trial 2 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 144, 'max_depth': 18}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:29:55,356] Trial 3 finished with value: 0.7560521415270017 and parameters: {'n_estimators': 107, 'max_depth': 3}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:29:57,729] Trial 4 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 187, 'max_depth': 5}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:29:58,915] Trial 5 finished with value: 0.7560521415270017 and parameters: {'n_estimators': 101, 'max_depth': 3}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:30:00,085] Trial 6 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 109, 'max_depth': 10}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:30:01,360] Trial 7 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 138, 'max_depth': 9}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:30:03,213] Trial 8 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 181, 'max_depth': 15}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:30:04,862] Trial 9 finished with value: 0.7616387337057727 and parameters: {'n_estimators': 148, 'max_depth': 9}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:30:06,101] Trial 10 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 124, 'max_depth': 10}. Best is trial 2 with value: 0.7709497206703911.
[I 2024-09-27 22:30:07,504] Trial 11 finished with value: 0.7802607076350093 and parameters: {'n_estimators': 133, 'max_depth': 16}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:08,577] Trial 12 finished with value: 0.7783985102420856 and parameters: {'n_estimators': 123, 'max_depth': 16}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:10,735] Trial 13 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 144, 'max_depth': 5}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:11,570] Trial 14 finished with value: 0.7746741154562384 and parameters: {'n_estimators': 57, 'max_depth': 16}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:12,983] Trial 15 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 177, 'max_depth': 13}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:14,452] Trial 16 finished with value: 0.7560521415270017 and parameters: {'n_estimators': 200, 'max_depth': 6}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:16,084] Trial 17 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 159, 'max_depth': 18}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:17,565] Trial 18 finished with value: 0.7597765363128491 and parameters: {'n_estimators': 132, 'max_depth': 4}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:20,282] Trial 19 finished with value: 0.7616387337057727 and parameters: {'n_estimators': 197, 'max_depth': 4}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:22,429] Trial 20 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 176, 'max_depth': 18}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:23,985] Trial 21 finished with value: 0.7746741154562383 and parameters: {'n_estimators': 130, 'max_depth': 12}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:25,255] Trial 22 finished with value: 0.7765363128491619 and parameters: {'n_estimators': 134, 'max_depth': 19}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:25,896] Trial 23 finished with value: 0.7765363128491621 and parameters: {'n_estimators': 54, 'max_depth': 17}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:27,551] Trial 24 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 146, 'max_depth': 14}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:28,824] Trial 25 finished with value: 0.7765363128491619 and parameters: {'n_estimators': 135, 'max_depth': 18}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:29,963] Trial 26 finished with value: 0.7783985102420856 and parameters: {'n_estimators': 113, 'max_depth': 19}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:32,275] Trial 27 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 179, 'max_depth': 13}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:33,262] Trial 28 finished with value: 0.7597765363128491 and parameters: {'n_estimators': 71, 'max_depth': 10}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:34,963] Trial 29 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 166, 'max_depth': 10}. Best is trial 11 with value: 0.7802607076350093.
[I 2024-09-27 22:30:35,683] Trial 30 finished with value: 0.7821229050279331 and parameters: {'n_estimators': 73, 'max_depth': 20}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:36,446] Trial 31 finished with value: 0.7597765363128491 and parameters: {'n_estimators': 80, 'max_depth': 9}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:37,857] Trial 32 finished with value: 0.756052141527002 and parameters: {'n_estimators': 120, 'max_depth': 3}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:39,265] Trial 33 finished with value: 0.7783985102420856 and parameters: {'n_estimators': 127, 'max_depth': 18}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:39,694] Trial 34 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 50, 'max_depth': 8}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:40,819] Trial 35 finished with value: 0.7765363128491621 and parameters: {'n_estimators': 107, 'max_depth': 16}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:42,048] Trial 36 finished with value: 0.7746741154562384 and parameters: {'n_estimators': 126, 'max_depth': 14}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:42,520] Trial 37 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 51, 'max_depth': 14}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:44,473] Trial 38 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 199, 'max_depth': 8}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:45,035] Trial 39 finished with value: 0.7579143389199255 and parameters: {'n_estimators': 67, 'max_depth': 10}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:46,800] Trial 40 finished with value: 0.7579143389199254 and parameters: {'n_estimators': 155, 'max_depth': 4}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:48,869] Trial 41 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 165, 'max_depth': 8}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:49,744] Trial 42 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 68, 'max_depth': 13}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:50,520] Trial 43 finished with value: 0.756052141527002 and parameters: {'n_estimators': 84, 'max_depth': 3}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:52,032] Trial 44 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 136, 'max_depth': 8}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:54,005] Trial 45 finished with value: 0.7746741154562384 and parameters: {'n_estimators': 168, 'max_depth': 12}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:55,956] Trial 46 finished with value: 0.7616387337057727 and parameters: {'n_estimators': 114, 'max_depth': 9}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:57,965] Trial 47 finished with value: 0.7635009310986964 and parameters: {'n_estimators': 187, 'max_depth': 5}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:30:59,753] Trial 48 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 139, 'max_depth': 16}. Best is trial 30 with value: 0.7821229050279331.
[I 2024-09-27 22:31:01,500] Trial 49 finished with value: 0.7802607076350093 and parameters: {'n_estimators': 123, 'max_depth': 18}. Best is trial 30 with value: 0.7821229050279331.
In [11]:
# Print the best result
print(f'Best trial accuracy: {study.best_trial.value}')
print(f'Best hyperparameters: {study.best_trial.params}')
Best trial accuracy: 0.7821229050279331
Best hyperparameters: {'n_estimators': 73, 'max_depth': 20}
In [12]:
from sklearn.metrics import accuracy_score
# Train a RandomForestClassifier using the best hyperparameters from Optuna
best_model = RandomForestClassifier(**study.best_trial.params, random_state=42)
# Fit the model to the training data
best_model.fit(X_train, y_train)
# Make predictions on the test set
y_pred = best_model.predict(X_test)
# Calculate the accuracy on the test set
test_accuracy = accuracy_score(y_test, y_pred)
# Print the test accuracy
print(f'Test Accuracy with best hyperparameters: {test_accuracy:.2f}')
Test Accuracy with best hyperparameters: 0.75
In [13]:
search_space = {
'n_estimators': [50, 100, 150, 200],
'max_depth': [5, 10, 15, 20]
}
In [14]:
# Create a study and optimize it using GridSampler
study = optuna.create_study(direction='maximize', sampler=optuna.samplers.GridSampler(search_space))
study.optimize(objective)
[I 2024-09-27 22:31:11,562] A new study created in memory with name: no-name-1258bd35-2fc8-4965-9747-ac336a4ff2d8
[I 2024-09-27 22:31:12,565] Trial 0 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 100, 'max_depth': 5}. Best is trial 0 with value: 0.7690875232774674.
[I 2024-09-27 22:31:13,849] Trial 1 finished with value: 0.7672253258845437 and parameters: {'n_estimators': 150, 'max_depth': 10}. Best is trial 0 with value: 0.7690875232774674.
[I 2024-09-27 22:31:14,327] Trial 2 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 50, 'max_depth': 15}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:15,268] Trial 3 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 100, 'max_depth': 15}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:16,336] Trial 4 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 100, 'max_depth': 20}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:16,978] Trial 5 finished with value: 0.7579143389199254 and parameters: {'n_estimators': 50, 'max_depth': 10}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:18,393] Trial 6 finished with value: 0.7653631284916201 and parameters: {'n_estimators': 150, 'max_depth': 5}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:19,810] Trial 7 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 150, 'max_depth': 20}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:21,224] Trial 8 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 150, 'max_depth': 15}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:23,914] Trial 9 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 200, 'max_depth': 10}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:26,079] Trial 10 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 200, 'max_depth': 20}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:28,375] Trial 11 finished with value: 0.7728119180633147 and parameters: {'n_estimators': 200, 'max_depth': 15}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:30,787] Trial 12 finished with value: 0.7690875232774674 and parameters: {'n_estimators': 200, 'max_depth': 5}. Best is trial 2 with value: 0.7728119180633147.
[I 2024-09-27 22:31:31,262] Trial 13 finished with value: 0.7746741154562384 and parameters: {'n_estimators': 50, 'max_depth': 5}. Best is trial 13 with value: 0.7746741154562384.
[I 2024-09-27 22:31:32,192] Trial 14 finished with value: 0.7616387337057727 and parameters: {'n_estimators': 100, 'max_depth': 10}. Best is trial 13 with value: 0.7746741154562384.
[I 2024-09-27 22:31:32,689] Trial 15 finished with value: 0.7709497206703911 and parameters: {'n_estimators': 50, 'max_depth': 20}. Best is trial 13 with value: 0.7746741154562384.
In [15]:
# Print the best result
print(f'Best trial accuracy: {study.best_trial.value}')
print(f'Best hyperparameters: {study.best_trial.params}')
Best trial accuracy: 0.7746741154562384
Best hyperparameters: {'n_estimators': 50, 'max_depth': 5}
In [16]:
from sklearn.metrics import accuracy_score
# Train a RandomForestClassifier using the best hyperparameters from Optuna
best_model = RandomForestClassifier(**study.best_trial.params, random_state=42)
# Fit the model to the training data
best_model.fit(X_train, y_train)
# Make predictions on the test set
y_pred = best_model.predict(X_test)
# Calculate the accuracy on the test set
test_accuracy = accuracy_score(y_test, y_pred)
# Print the test accuracy
print(f'Test Accuracy with best hyperparameters: {test_accuracy:.2f}')
Test Accuracy with best hyperparameters: 0.74
Optuna Visualizations¶
In [17]:
# For visualizations
from optuna.visualization import plot_optimization_history, plot_parallel_coordinate, plot_slice, plot_contour, plot_param_importances
In [18]:
# 1. Optimization History
plot_optimization_history(study).show()
In [19]:
# 2. Parallel Coordinates Plot
plot_parallel_coordinate(study).show()
In [20]:
# 3. Slice Plot
plot_slice(study).show()
In [21]:
# 4. Contour Plot
plot_contour(study).show()
In [22]:
# 5. Hyperparameter Importance
plot_param_importances(study).show()
Optimizing Multiple ML Models¶
In [23]:
# Importing the required libraries
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.svm import SVC
In [24]:
# Define the objective function for Optuna
def objective(trial):
# Choose the algorithm to tune
classifier_name = trial.suggest_categorical('classifier', ['SVM', 'RandomForest', 'GradientBoosting'])
if classifier_name == 'SVM':
# SVM hyperparameters
c = trial.suggest_float('C', 0.1, 100, log=True)
kernel = trial.suggest_categorical('kernel', ['linear', 'rbf', 'poly', 'sigmoid'])
gamma = trial.suggest_categorical('gamma', ['scale', 'auto'])
model = SVC(C=c, kernel=kernel, gamma=gamma, random_state=42)
elif classifier_name == 'RandomForest':
# Random Forest hyperparameters
n_estimators = trial.suggest_int('n_estimators', 50, 300)
max_depth = trial.suggest_int('max_depth', 3, 20)
min_samples_split = trial.suggest_int('min_samples_split', 2, 10)
min_samples_leaf = trial.suggest_int('min_samples_leaf', 1, 10)
bootstrap = trial.suggest_categorical('bootstrap', [True, False])
model = RandomForestClassifier(
n_estimators=n_estimators,
max_depth=max_depth,
min_samples_split=min_samples_split,
min_samples_leaf=min_samples_leaf,
bootstrap=bootstrap,
random_state=42
)
elif classifier_name == 'GradientBoosting':
# Gradient Boosting hyperparameters
n_estimators = trial.suggest_int('n_estimators', 50, 300)
learning_rate = trial.suggest_float('learning_rate', 0.01, 0.3, log=True)
max_depth = trial.suggest_int('max_depth', 3, 20)
min_samples_split = trial.suggest_int('min_samples_split', 2, 10)
min_samples_leaf = trial.suggest_int('min_samples_leaf', 1, 10)
model = GradientBoostingClassifier(
n_estimators=n_estimators,
learning_rate=learning_rate,
max_depth=max_depth,
min_samples_split=min_samples_split,
min_samples_leaf=min_samples_leaf,
random_state=42
)
# Perform cross-validation and return the mean accuracy
score = cross_val_score(model, X_train, y_train, cv=3, scoring='accuracy').mean()
return score
In [25]:
# Create a study and optimize it using CmaEsSampler
study = optuna.create_study(direction='maximize')
study.optimize(objective, n_trials=100)
[I 2024-09-27 22:33:18,376] A new study created in memory with name: no-name-2f5943cb-bb5f-426e-9483-49482ec71544
[I 2024-09-27 22:33:18,419] Trial 0 finished with value: 0.7039106145251397 and parameters: {'classifier': 'SVM', 'C': 5.127905443740188, 'kernel': 'sigmoid', 'gamma': 'auto'}. Best is trial 0 with value: 0.7039106145251397.
[I 2024-09-27 22:33:19,344] Trial 1 finished with value: 0.7690875232774674 and parameters: {'classifier': 'RandomForest', 'n_estimators': 163, 'max_depth': 9, 'min_samples_split': 10, 'min_samples_leaf': 4, 'bootstrap': False}. Best is trial 1 with value: 0.7690875232774674.
[I 2024-09-27 22:33:20,608] Trial 2 finished with value: 0.7672253258845437 and parameters: {'classifier': 'RandomForest', 'n_estimators': 204, 'max_depth': 14, 'min_samples_split': 6, 'min_samples_leaf': 7, 'bootstrap': False}. Best is trial 1 with value: 0.7690875232774674.
[I 2024-09-27 22:33:21,870] Trial 3 finished with value: 0.7709497206703911 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 181, 'learning_rate': 0.021166359875068444, 'max_depth': 3, 'min_samples_split': 7, 'min_samples_leaf': 8}. Best is trial 3 with value: 0.7709497206703911.
[I 2024-09-27 22:33:23,222] Trial 4 finished with value: 0.7709497206703909 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 63, 'learning_rate': 0.053246312728341405, 'max_depth': 15, 'min_samples_split': 5, 'min_samples_leaf': 6}. Best is trial 3 with value: 0.7709497206703911.
[I 2024-09-27 22:33:29,829] Trial 5 finished with value: 0.7355679702048418 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 192, 'learning_rate': 0.10050496773552682, 'max_depth': 17, 'min_samples_split': 10, 'min_samples_leaf': 2}. Best is trial 3 with value: 0.7709497206703911.
[I 2024-09-27 22:33:31,926] Trial 6 finished with value: 0.74487895716946 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 67, 'learning_rate': 0.032125358501639965, 'max_depth': 15, 'min_samples_split': 6, 'min_samples_leaf': 4}. Best is trial 3 with value: 0.7709497206703911.
[I 2024-09-27 22:33:32,030] Trial 7 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 13.163292741701442, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:40,867] Trial 8 finished with value: 0.7467411545623835 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 276, 'learning_rate': 0.010955910694160043, 'max_depth': 18, 'min_samples_split': 7, 'min_samples_leaf': 4}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:43,077] Trial 9 finished with value: 0.7635009310986964 and parameters: {'classifier': 'RandomForest', 'n_estimators': 260, 'max_depth': 20, 'min_samples_split': 4, 'min_samples_leaf': 4, 'bootstrap': False}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:43,571] Trial 10 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 71.15002997177179, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:43,978] Trial 11 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 63.339757663325344, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:44,342] Trial 12 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 51.20027498585638, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:44,433] Trial 13 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 8.794987416440552, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:44,504] Trial 14 finished with value: 0.7541899441340782 and parameters: {'classifier': 'SVM', 'C': 0.2157883755959842, 'kernel': 'rbf', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:44,592] Trial 15 finished with value: 0.7057728119180632 and parameters: {'classifier': 'SVM', 'C': 19.969541278446624, 'kernel': 'poly', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:44,647] Trial 16 finished with value: 0.7839851024208566 and parameters: {'classifier': 'SVM', 'C': 1.061284332565701, 'kernel': 'linear', 'gamma': 'auto'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:45,324] Trial 17 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 89.67461250128329, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:45,422] Trial 18 finished with value: 0.7076350093109869 and parameters: {'classifier': 'SVM', 'C': 18.204054659497388, 'kernel': 'poly', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:45,511] Trial 19 finished with value: 0.7690875232774674 and parameters: {'classifier': 'SVM', 'C': 1.797504971353138, 'kernel': 'rbf', 'gamma': 'auto'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:46,617] Trial 20 finished with value: 0.7635009310986964 and parameters: {'classifier': 'RandomForest', 'n_estimators': 118, 'max_depth': 8, 'min_samples_split': 2, 'min_samples_leaf': 10, 'bootstrap': True}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:46,914] Trial 21 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 39.4265905032052, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:47,626] Trial 22 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 85.83704796917827, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:47,831] Trial 23 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 19.63025932393597, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:47,883] Trial 24 finished with value: 0.7001862197392924 and parameters: {'classifier': 'SVM', 'C': 34.67342024070224, 'kernel': 'sigmoid', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:47,990] Trial 25 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 9.381243887443818, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:48,265] Trial 26 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 40.71020780118806, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:48,794] Trial 27 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 86.64076708396652, 'kernel': 'linear', 'gamma': 'auto'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:49,799] Trial 28 finished with value: 0.7616387337057727 and parameters: {'classifier': 'RandomForest', 'n_estimators': 126, 'max_depth': 4, 'min_samples_split': 2, 'min_samples_leaf': 1, 'bootstrap': True}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:49,850] Trial 29 finished with value: 0.696461824953445 and parameters: {'classifier': 'SVM', 'C': 10.141781395893432, 'kernel': 'sigmoid', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:49,918] Trial 30 finished with value: 0.7579143389199254 and parameters: {'classifier': 'SVM', 'C': 4.413131355681025, 'kernel': 'rbf', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:50,202] Trial 31 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 46.862176132591024, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:50,539] Trial 32 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 56.32371207990227, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:50,759] Trial 33 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 24.48671522754418, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:53,597] Trial 34 finished with value: 0.7560521415270017 and parameters: {'classifier': 'RandomForest', 'n_estimators': 296, 'max_depth': 11, 'min_samples_split': 8, 'min_samples_leaf': 10, 'bootstrap': True}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:54,039] Trial 35 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 96.85425627323056, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:57,259] Trial 36 finished with value: 0.7392923649906891 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 237, 'learning_rate': 0.27163998609109996, 'max_depth': 6, 'min_samples_split': 4, 'min_samples_leaf': 8}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:33:57,330] Trial 37 finished with value: 0.7113594040968342 and parameters: {'classifier': 'SVM', 'C': 13.94311931196979, 'kernel': 'poly', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:00,525] Trial 38 finished with value: 0.7523277467411544 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 129, 'learning_rate': 0.2664611610137435, 'max_depth': 12, 'min_samples_split': 9, 'min_samples_leaf': 2}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:01,177] Trial 39 finished with value: 0.7839851024208566 and parameters: {'classifier': 'RandomForest', 'n_estimators': 99, 'max_depth': 20, 'min_samples_split': 3, 'min_samples_leaf': 9, 'bootstrap': False}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:01,525] Trial 40 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 53.65805539821523, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:01,608] Trial 41 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 7.304713901391382, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:01,801] Trial 42 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 27.34858814657245, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:01,876] Trial 43 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 2.6713141199037005, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:01,998] Trial 44 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 12.714190690468206, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:05,991] Trial 45 finished with value: 0.7616387337057727 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 226, 'learning_rate': 0.12416221421209432, 'max_depth': 6, 'min_samples_split': 8, 'min_samples_leaf': 6}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:06,074] Trial 46 finished with value: 0.7039106145251397 and parameters: {'classifier': 'SVM', 'C': 5.157175655030918, 'kernel': 'sigmoid', 'gamma': 'auto'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:06,519] Trial 47 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 56.12680458798174, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:06,634] Trial 48 finished with value: 0.7355679702048418 and parameters: {'classifier': 'SVM', 'C': 31.42605627389554, 'kernel': 'rbf', 'gamma': 'auto'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:14,537] Trial 49 finished with value: 0.6945996275605214 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 156, 'learning_rate': 0.010589772016952943, 'max_depth': 11, 'min_samples_split': 4, 'min_samples_leaf': 1}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:16,590] Trial 50 finished with value: 0.7653631284916201 and parameters: {'classifier': 'RandomForest', 'n_estimators': 232, 'max_depth': 8, 'min_samples_split': 5, 'min_samples_leaf': 5, 'bootstrap': True}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:16,973] Trial 51 finished with value: 0.7858472998137801 and parameters: {'classifier': 'SVM', 'C': 73.23749076601968, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 7 with value: 0.7858472998137801.
[I 2024-09-27 22:34:17,017] Trial 52 finished with value: 0.7858472998137803 and parameters: {'classifier': 'SVM', 'C': 0.3330276900641483, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 52 with value: 0.7858472998137803.
[I 2024-09-27 22:34:17,059] Trial 53 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.18674606544736794, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 53 with value: 0.7877094972067038.
[I 2024-09-27 22:34:17,102] Trial 54 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.12557178938087898, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,147] Trial 55 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.11368319134435127, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,194] Trial 56 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.10581557956507609, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,232] Trial 57 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.10782373475657542, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,278] Trial 58 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.10311230481411368, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,330] Trial 59 finished with value: 0.707635009310987 and parameters: {'classifier': 'SVM', 'C': 0.1014948562761641, 'kernel': 'poly', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,367] Trial 60 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.10308205056490252, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,417] Trial 61 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.10932627106459919, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,462] Trial 62 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.16254483886731622, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,509] Trial 63 finished with value: 0.7839851024208566 and parameters: {'classifier': 'SVM', 'C': 0.3382044927114348, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,556] Trial 64 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.15315908746495085, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,596] Trial 65 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.1740032333224884, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,633] Trial 66 finished with value: 0.7858472998137803 and parameters: {'classifier': 'SVM', 'C': 0.28294028909238217, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,675] Trial 67 finished with value: 0.7746741154562384 and parameters: {'classifier': 'SVM', 'C': 0.14713360896256278, 'kernel': 'sigmoid', 'gamma': 'auto'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,712] Trial 68 finished with value: 0.7839851024208566 and parameters: {'classifier': 'SVM', 'C': 0.49445585600480974, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:17,801] Trial 69 finished with value: 0.7541899441340782 and parameters: {'classifier': 'SVM', 'C': 0.22660710395039096, 'kernel': 'rbf', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:18,418] Trial 70 finished with value: 0.7783985102420856 and parameters: {'classifier': 'RandomForest', 'n_estimators': 81, 'max_depth': 5, 'min_samples_split': 9, 'min_samples_leaf': 3, 'bootstrap': False}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:18,456] Trial 71 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.13045735336492298, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:18,503] Trial 72 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.14457506479445156, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:18,549] Trial 73 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.13861160208357307, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:18,606] Trial 74 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.14898077395098383, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:18,653] Trial 75 finished with value: 0.7150837988826816 and parameters: {'classifier': 'SVM', 'C': 0.2141317150433384, 'kernel': 'poly', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:18,701] Trial 76 finished with value: 0.7839851024208566 and parameters: {'classifier': 'SVM', 'C': 0.5057881427835088, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:23,700] Trial 77 finished with value: 0.7318435754189944 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 208, 'learning_rate': 0.07312265239041675, 'max_depth': 17, 'min_samples_split': 3, 'min_samples_leaf': 7}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:23,775] Trial 78 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.13881495239704905, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:23,848] Trial 79 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.14396278979173552, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:23,900] Trial 80 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.13832971170498612, 'kernel': 'linear', 'gamma': 'auto'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:23,969] Trial 81 finished with value: 0.7858472998137803 and parameters: {'classifier': 'SVM', 'C': 0.2510742517889551, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:24,025] Trial 82 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.13703460233592912, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:24,084] Trial 83 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.14671033716178772, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:24,142] Trial 84 finished with value: 0.7858472998137803 and parameters: {'classifier': 'SVM', 'C': 0.19943029680052562, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:24,209] Trial 85 finished with value: 0.7839851024208566 and parameters: {'classifier': 'SVM', 'C': 0.45649625601519234, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:24,277] Trial 86 finished with value: 0.7560521415270017 and parameters: {'classifier': 'SVM', 'C': 0.2995871391425439, 'kernel': 'sigmoid', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:25,650] Trial 87 finished with value: 0.7635009310986964 and parameters: {'classifier': 'RandomForest', 'n_estimators': 146, 'max_depth': 13, 'min_samples_split': 7, 'min_samples_leaf': 9, 'bootstrap': True}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:25,699] Trial 88 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.12685776692812445, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:25,774] Trial 89 finished with value: 0.7616387337057727 and parameters: {'classifier': 'SVM', 'C': 0.17286380698808376, 'kernel': 'rbf', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:28,592] Trial 90 finished with value: 0.7579143389199255 and parameters: {'classifier': 'GradientBoosting', 'n_estimators': 93, 'learning_rate': 0.023074928607194418, 'max_depth': 18, 'min_samples_split': 5, 'min_samples_leaf': 5}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:28,641] Trial 91 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.13316036513212437, 'kernel': 'linear', 'gamma': 'auto'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:28,690] Trial 92 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.12794077216094413, 'kernel': 'linear', 'gamma': 'auto'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:28,742] Trial 93 finished with value: 0.7858472998137803 and parameters: {'classifier': 'SVM', 'C': 0.24742230023719408, 'kernel': 'linear', 'gamma': 'auto'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:28,791] Trial 94 finished with value: 0.7877094972067038 and parameters: {'classifier': 'SVM', 'C': 0.17015439122427506, 'kernel': 'linear', 'gamma': 'auto'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:28,844] Trial 95 finished with value: 0.7858472998137803 and parameters: {'classifier': 'SVM', 'C': 0.7503811362380767, 'kernel': 'linear', 'gamma': 'auto'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:28,897] Trial 96 finished with value: 0.7858472998137803 and parameters: {'classifier': 'SVM', 'C': 0.20460591954669974, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:28,956] Trial 97 finished with value: 0.7895716945996275 and parameters: {'classifier': 'SVM', 'C': 0.12934764318312444, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:29,016] Trial 98 finished with value: 0.7132216014897579 and parameters: {'classifier': 'SVM', 'C': 0.15132715253215714, 'kernel': 'poly', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
[I 2024-09-27 22:34:29,074] Trial 99 finished with value: 0.7858472998137803 and parameters: {'classifier': 'SVM', 'C': 0.18748816846417063, 'kernel': 'linear', 'gamma': 'scale'}. Best is trial 54 with value: 0.7895716945996275.
In [26]:
# Retrieve the best trial
best_trial = study.best_trial
print("Best trial parameters:", best_trial.params)
print("Best trial accuracy:", best_trial.value)
Best trial parameters: {'classifier': 'SVM', 'C': 0.12557178938087898, 'kernel': 'linear', 'gamma': 'scale'}
Best trial accuracy: 0.7895716945996275
In [27]:
study.trials_dataframe()
Out[27]:
| number | value | datetime_start | datetime_complete | duration | params_C | params_bootstrap | params_classifier | params_gamma | params_kernel | params_learning_rate | params_max_depth | params_min_samples_leaf | params_min_samples_split | params_n_estimators | state | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 0.703911 | 2024-09-27 22:33:18.378679 | 2024-09-27 22:33:18.418704 | 0 days 00:00:00.040025 | 5.127905 | NaN | SVM | auto | sigmoid | NaN | NaN | NaN | NaN | NaN | COMPLETE |
| 1 | 1 | 0.769088 | 2024-09-27 22:33:18.420699 | 2024-09-27 22:33:19.344689 | 0 days 00:00:00.923990 | NaN | False | RandomForest | NaN | NaN | NaN | 9.0 | 4.0 | 10.0 | 163.0 | COMPLETE |
| 2 | 2 | 0.767225 | 2024-09-27 22:33:19.345686 | 2024-09-27 22:33:20.608171 | 0 days 00:00:01.262485 | NaN | False | RandomForest | NaN | NaN | NaN | 14.0 | 7.0 | 6.0 | 204.0 | COMPLETE |
| 3 | 3 | 0.770950 | 2024-09-27 22:33:20.610159 | 2024-09-27 22:33:21.869979 | 0 days 00:00:01.259820 | NaN | NaN | GradientBoosting | NaN | NaN | 0.021166 | 3.0 | 8.0 | 7.0 | 181.0 | COMPLETE |
| 4 | 4 | 0.770950 | 2024-09-27 22:33:21.871990 | 2024-09-27 22:33:23.222005 | 0 days 00:00:01.350015 | NaN | NaN | GradientBoosting | NaN | NaN | 0.053246 | 15.0 | 6.0 | 5.0 | 63.0 | COMPLETE |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 95 | 95 | 0.785847 | 2024-09-27 22:34:28.792905 | 2024-09-27 22:34:28.843902 | 0 days 00:00:00.050997 | 0.750381 | NaN | SVM | auto | linear | NaN | NaN | NaN | NaN | NaN | COMPLETE |
| 96 | 96 | 0.785847 | 2024-09-27 22:34:28.844921 | 2024-09-27 22:34:28.896903 | 0 days 00:00:00.051982 | 0.204606 | NaN | SVM | scale | linear | NaN | NaN | NaN | NaN | NaN | COMPLETE |
| 97 | 97 | 0.789572 | 2024-09-27 22:34:28.898904 | 2024-09-27 22:34:28.955913 | 0 days 00:00:00.057009 | 0.129348 | NaN | SVM | scale | linear | NaN | NaN | NaN | NaN | NaN | COMPLETE |
| 98 | 98 | 0.713222 | 2024-09-27 22:34:28.957909 | 2024-09-27 22:34:29.016904 | 0 days 00:00:00.058995 | 0.151327 | NaN | SVM | scale | poly | NaN | NaN | NaN | NaN | NaN | COMPLETE |
| 99 | 99 | 0.785847 | 2024-09-27 22:34:29.017901 | 2024-09-27 22:34:29.073905 | 0 days 00:00:00.056004 | 0.187488 | NaN | SVM | scale | linear | NaN | NaN | NaN | NaN | NaN | COMPLETE |
100 rows × 16 columns
In [28]:
study.trials_dataframe()['params_classifier'].value_counts()
Out[28]:
params_classifier SVM 79 GradientBoosting 11 RandomForest 10 Name: count, dtype: int64
In [29]:
study.trials_dataframe().groupby('params_classifier')['value'].mean()
Out[29]:
params_classifier GradientBoosting 0.746064 RandomForest 0.767225 SVM 0.773826 Name: value, dtype: float64
In [30]:
# 1. Optimization History
plot_optimization_history(study).show()
In [31]:
# 3. Slice Plot
plot_slice(study).show()
In [32]:
# 5. Hyperparameter Importance
plot_param_importances(study).show()
In [36]:
import optuna
import xgboost as xgb
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score
import numpy as np
# Load the Iris dataset
X, y = load_iris(return_X_y=True)
# Split the dataset into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Define the objective function for XGBoost
def objective(trial):
# Hyperparameter search space
param = {
'verbosity': 0,
'objective': 'multi:softprob',
'num_class': 3,
'eval_metric': 'mlogloss', # Ensure that the eval_metric is specified here
'booster': 'gbtree',
'lambda': trial.suggest_float('lambda', 1e-8, 1.0, log=True),
'alpha': trial.suggest_float('alpha', 1e-8, 1.0, log=True),
'eta': trial.suggest_float('eta', 0.01, 0.3),
'gamma': trial.suggest_float('gamma', 1e-8, 1.0, log=True),
'max_depth': trial.suggest_int('max_depth', 3, 9),
'min_child_weight': trial.suggest_int('min_child_weight', 1, 10),
'subsample': trial.suggest_float('subsample', 0.4, 1.0),
'colsample_bytree': trial.suggest_float('colsample_bytree', 0.4, 1.0),
'n_estimators': 300,
}
# Create DMatrix for XGBoost
dtrain = xgb.DMatrix(X_train, label=y_train)
dtest = xgb.DMatrix(X_test, label=y_test)
# Define a pruning callback based on evaluation metrics
pruning_callback = optuna.integration.XGBoostPruningCallback(trial, "eval-mlogloss") # Match the metric name in the evals list
# Train the model
bst = xgb.train(
param,
dtrain,
num_boost_round=300,
evals=[(dtrain, "train"), (dtest, "eval")], # Ensure the eval datasets and names are specified
early_stopping_rounds=30,
callbacks=[pruning_callback]
)
# Predict on the test set
preds = bst.predict(dtest)
best_preds = [int(np.argmax(line)) for line in preds]
# Return accuracy as the objective value
accuracy = accuracy_score(y_test, best_preds)
return accuracy
# Create a study with pruning
study = optuna.create_study(direction='maximize', pruner=optuna.pruners.SuccessiveHalvingPruner())
study.optimize(objective, n_trials=50)
# Output the best trial
print(f"Best trial: {study.best_trial.params}")
print(f"Best accuracy: {study.best_value}")
[I 2024-09-27 22:42:18,475] A new study created in memory with name: no-name-0fe92904-5ced-4920-947b-3aab388e6db4
[0] train-mlogloss:0.96369 eval-mlogloss:0.95768 [1] train-mlogloss:0.86966 eval-mlogloss:0.86387 [2] train-mlogloss:0.79518 eval-mlogloss:0.78347 [3] train-mlogloss:0.71488 eval-mlogloss:0.69639 [4] train-mlogloss:0.67885 eval-mlogloss:0.66456 [5] train-mlogloss:0.63855 eval-mlogloss:0.62487 [6] train-mlogloss:0.59564 eval-mlogloss:0.58377 [7] train-mlogloss:0.53994 eval-mlogloss:0.52518 [8] train-mlogloss:0.49265 eval-mlogloss:0.47820 [9] train-mlogloss:0.48169 eval-mlogloss:0.47379 [10] train-mlogloss:0.43742 eval-mlogloss:0.42780 [11] train-mlogloss:0.41012 eval-mlogloss:0.39792 [12] train-mlogloss:0.37780 eval-mlogloss:0.36005 [13] train-mlogloss:0.36046 eval-mlogloss:0.34144 [14] train-mlogloss:0.32237 eval-mlogloss:0.29845 [15] train-mlogloss:0.30668 eval-mlogloss:0.28317 [16] train-mlogloss:0.28688 eval-mlogloss:0.26055 [17] train-mlogloss:0.26722 eval-mlogloss:0.23979 [18] train-mlogloss:0.25296 eval-mlogloss:0.22164 [19] train-mlogloss:0.23859 eval-mlogloss:0.20476 [20] train-mlogloss:0.22587 eval-mlogloss:0.18976 [21] train-mlogloss:0.22406 eval-mlogloss:0.18890 [22] train-mlogloss:0.22202 eval-mlogloss:0.18892 [23] train-mlogloss:0.20686 eval-mlogloss:0.17118 [24] train-mlogloss:0.20355 eval-mlogloss:0.16931 [25] train-mlogloss:0.19546 eval-mlogloss:0.15837 [26] train-mlogloss:0.19188 eval-mlogloss:0.15404 [27] train-mlogloss:0.18659 eval-mlogloss:0.14762 [28] train-mlogloss:0.18369 eval-mlogloss:0.14400 [29] train-mlogloss:0.17987 eval-mlogloss:0.13960 [30] train-mlogloss:0.17641 eval-mlogloss:0.13561 [31] train-mlogloss:0.17401 eval-mlogloss:0.13389 [32] train-mlogloss:0.16923 eval-mlogloss:0.12795 [33] train-mlogloss:0.16701 eval-mlogloss:0.12557 [34] train-mlogloss:0.16379 eval-mlogloss:0.12143 [35] train-mlogloss:0.15998 eval-mlogloss:0.11525 [36] train-mlogloss:0.15937 eval-mlogloss:0.11598 [37] train-mlogloss:0.15864 eval-mlogloss:0.11520 [38] train-mlogloss:0.15775 eval-mlogloss:0.11531 [39] train-mlogloss:0.15727 eval-mlogloss:0.11603 [40] train-mlogloss:0.15651 eval-mlogloss:0.11612 [41] train-mlogloss:0.15420 eval-mlogloss:0.11269 [42] train-mlogloss:0.15254 eval-mlogloss:0.11039 [43] train-mlogloss:0.15141 eval-mlogloss:0.11025 [44] train-mlogloss:0.14987 eval-mlogloss:0.10823 [45] train-mlogloss:0.14782 eval-mlogloss:0.10570 [46] train-mlogloss:0.14661 eval-mlogloss:0.10403 [47] train-mlogloss:0.14660 eval-mlogloss:0.10404 [48] train-mlogloss:0.14482 eval-mlogloss:0.10080 [49] train-mlogloss:0.14332 eval-mlogloss:0.09807 [50] train-mlogloss:0.14300 eval-mlogloss:0.09868 [51] train-mlogloss:0.14272 eval-mlogloss:0.09928 [52] train-mlogloss:0.14158 eval-mlogloss:0.09732 [53] train-mlogloss:0.14158 eval-mlogloss:0.09734 [54] train-mlogloss:0.14158 eval-mlogloss:0.09736 [55] train-mlogloss:0.14098 eval-mlogloss:0.09680 [56] train-mlogloss:0.13986 eval-mlogloss:0.09481 [57] train-mlogloss:0.13985 eval-mlogloss:0.09485 [58] train-mlogloss:0.13939 eval-mlogloss:0.09441 [59] train-mlogloss:0.13794 eval-mlogloss:0.09224 [60] train-mlogloss:0.13765 eval-mlogloss:0.09280 [61] train-mlogloss:0.13764 eval-mlogloss:0.09282 [62] train-mlogloss:0.13764 eval-mlogloss:0.09284 [63] train-mlogloss:0.13743 eval-mlogloss:0.09260 [64] train-mlogloss:0.13677 eval-mlogloss:0.09130 [65] train-mlogloss:0.13635 eval-mlogloss:0.09090 [66] train-mlogloss:0.13635 eval-mlogloss:0.09092 [67] train-mlogloss:0.13557 eval-mlogloss:0.08935 [68] train-mlogloss:0.13557 eval-mlogloss:0.08937 [69] train-mlogloss:0.13556 eval-mlogloss:0.08938 [70] train-mlogloss:0.13556 eval-mlogloss:0.08942 [71] train-mlogloss:0.13556 eval-mlogloss:0.08943 [72] train-mlogloss:0.13556 eval-mlogloss:0.08944 [73] train-mlogloss:0.13516 eval-mlogloss:0.08913 [74] train-mlogloss:0.13516 eval-mlogloss:0.08914 [75] train-mlogloss:0.13516 eval-mlogloss:0.08916 [76] train-mlogloss:0.13515 eval-mlogloss:0.08919 [77] train-mlogloss:0.13515 eval-mlogloss:0.08920 [78] train-mlogloss:0.13515 eval-mlogloss:0.08920 [79] train-mlogloss:0.13515 eval-mlogloss:0.08923 [80] train-mlogloss:0.13514 eval-mlogloss:0.08925 [81] train-mlogloss:0.13514 eval-mlogloss:0.08925 [82] train-mlogloss:0.13411 eval-mlogloss:0.08743 [83] train-mlogloss:0.13411 eval-mlogloss:0.08743 [84] train-mlogloss:0.13410 eval-mlogloss:0.08747 [85] train-mlogloss:0.13410 eval-mlogloss:0.08748 [86] train-mlogloss:0.13410 eval-mlogloss:0.08750 [87] train-mlogloss:0.13410 eval-mlogloss:0.08749 [88] train-mlogloss:0.13378 eval-mlogloss:0.08724 [89] train-mlogloss:0.13377 eval-mlogloss:0.08727 [90] train-mlogloss:0.13377 eval-mlogloss:0.08728 [91] train-mlogloss:0.13357 eval-mlogloss:0.08690 [92] train-mlogloss:0.13357 eval-mlogloss:0.08691 [93] train-mlogloss:0.13327 eval-mlogloss:0.08669 [94] train-mlogloss:0.13296 eval-mlogloss:0.08649 [95] train-mlogloss:0.13296 eval-mlogloss:0.08650 [96] train-mlogloss:0.13296 eval-mlogloss:0.08652 [97] train-mlogloss:0.13296 eval-mlogloss:0.08654 [98] train-mlogloss:0.13295 eval-mlogloss:0.08654 [99] train-mlogloss:0.13295 eval-mlogloss:0.08658 [100] train-mlogloss:0.13295 eval-mlogloss:0.08660 [101] train-mlogloss:0.13295 eval-mlogloss:0.08660 [102] train-mlogloss:0.13295 eval-mlogloss:0.08661 [103] train-mlogloss:0.13295 eval-mlogloss:0.08666 [104] train-mlogloss:0.13294 eval-mlogloss:0.08670 [105] train-mlogloss:0.13294 eval-mlogloss:0.08671 [106] train-mlogloss:0.13294 eval-mlogloss:0.08670 [107] train-mlogloss:0.13294 eval-mlogloss:0.08670 [108] train-mlogloss:0.13236 eval-mlogloss:0.08538 [109] train-mlogloss:0.13236 eval-mlogloss:0.08537 [110] train-mlogloss:0.13236 eval-mlogloss:0.08540 [111] train-mlogloss:0.13236 eval-mlogloss:0.08542 [112] train-mlogloss:0.13236 eval-mlogloss:0.08541 [113] train-mlogloss:0.13236 eval-mlogloss:0.08543 [114] train-mlogloss:0.13236 eval-mlogloss:0.08543 [115] train-mlogloss:0.13236 eval-mlogloss:0.08542 [116] train-mlogloss:0.13236 eval-mlogloss:0.08542 [117] train-mlogloss:0.13236 eval-mlogloss:0.08542 [118] train-mlogloss:0.13236 eval-mlogloss:0.08543 [119] train-mlogloss:0.13237 eval-mlogloss:0.08534 [120] train-mlogloss:0.13237 eval-mlogloss:0.08534 [121] train-mlogloss:0.13236 eval-mlogloss:0.08537 [122] train-mlogloss:0.13236 eval-mlogloss:0.08537 [123] train-mlogloss:0.13236 eval-mlogloss:0.08536 [124] train-mlogloss:0.13236 eval-mlogloss:0.08536 [125] train-mlogloss:0.13236 eval-mlogloss:0.08538 [126] train-mlogloss:0.13236 eval-mlogloss:0.08540 [127] train-mlogloss:0.13236 eval-mlogloss:0.08540 [128] train-mlogloss:0.13236 eval-mlogloss:0.08540 [129] train-mlogloss:0.13204 eval-mlogloss:0.08519 [130] train-mlogloss:0.13204 eval-mlogloss:0.08519 [131] train-mlogloss:0.13204 eval-mlogloss:0.08519 [132] train-mlogloss:0.13204 eval-mlogloss:0.08520 [133] train-mlogloss:0.13204 eval-mlogloss:0.08520 [134] train-mlogloss:0.13204 eval-mlogloss:0.08522 [135] train-mlogloss:0.13204 eval-mlogloss:0.08523 [136] train-mlogloss:0.13204 eval-mlogloss:0.08521 [137] train-mlogloss:0.13204 eval-mlogloss:0.08521 [138] train-mlogloss:0.13204 eval-mlogloss:0.08521 [139] train-mlogloss:0.13204 eval-mlogloss:0.08522 [140] train-mlogloss:0.13204 eval-mlogloss:0.08522 [141] train-mlogloss:0.13204 eval-mlogloss:0.08521 [142] train-mlogloss:0.13204 eval-mlogloss:0.08526 [143] train-mlogloss:0.13204 eval-mlogloss:0.08525 [144] train-mlogloss:0.13204 eval-mlogloss:0.08526 [145] train-mlogloss:0.13204 eval-mlogloss:0.08527 [146] train-mlogloss:0.13204 eval-mlogloss:0.08527 [147] train-mlogloss:0.13204 eval-mlogloss:0.08523 [148] train-mlogloss:0.13204 eval-mlogloss:0.08523 [149] train-mlogloss:0.13204 eval-mlogloss:0.08522 [150] train-mlogloss:0.13204 eval-mlogloss:0.08523 [151] train-mlogloss:0.13204 eval-mlogloss:0.08523 [152] train-mlogloss:0.13204 eval-mlogloss:0.08524 [153] train-mlogloss:0.13204 eval-mlogloss:0.08523 [154] train-mlogloss:0.13204 eval-mlogloss:0.08524 [155] train-mlogloss:0.13204 eval-mlogloss:0.08524 [156] train-mlogloss:0.13204 eval-mlogloss:0.08524 [157] train-mlogloss:0.13204 eval-mlogloss:0.08524 [158] train-mlogloss:0.13204 eval-mlogloss:0.08523 [159] train-mlogloss:0.13204 eval-mlogloss:0.08523
[I 2024-09-27 22:42:19,815] Trial 0 finished with value: 1.0 and parameters: {'lambda': 2.0969304028350764e-06, 'alpha': 0.023325358760680433, 'eta': 0.14810100269032767, 'gamma': 0.22514175819708912, 'max_depth': 8, 'min_child_weight': 6, 'subsample': 0.9866425347455645, 'colsample_bytree': 0.4919084310855477}. Best is trial 0 with value: 1.0.
[0] train-mlogloss:0.92744 eval-mlogloss:0.93188 [1] train-mlogloss:0.79647 eval-mlogloss:0.80582 [2] train-mlogloss:0.67210 eval-mlogloss:0.66964 [3] train-mlogloss:0.57604 eval-mlogloss:0.56328 [4] train-mlogloss:0.50728 eval-mlogloss:0.49873 [5] train-mlogloss:0.43980 eval-mlogloss:0.42605 [6] train-mlogloss:0.38269 eval-mlogloss:0.36750 [7] train-mlogloss:0.34336 eval-mlogloss:0.32864 [8] train-mlogloss:0.30745 eval-mlogloss:0.29518 [9] train-mlogloss:0.27181 eval-mlogloss:0.25725 [10] train-mlogloss:0.24080 eval-mlogloss:0.22167 [11] train-mlogloss:0.21505 eval-mlogloss:0.19362 [12] train-mlogloss:0.19686 eval-mlogloss:0.17602 [13] train-mlogloss:0.18152 eval-mlogloss:0.15834 [14] train-mlogloss:0.16451 eval-mlogloss:0.14253 [15] train-mlogloss:0.15227 eval-mlogloss:0.13008 [16] train-mlogloss:0.14005 eval-mlogloss:0.11680 [17] train-mlogloss:0.13038 eval-mlogloss:0.10913 [18] train-mlogloss:0.12223 eval-mlogloss:0.10219 [19] train-mlogloss:0.11631 eval-mlogloss:0.09683 [20] train-mlogloss:0.10964 eval-mlogloss:0.08822 [21] train-mlogloss:0.10607 eval-mlogloss:0.08457 [22] train-mlogloss:0.10063 eval-mlogloss:0.07696 [23] train-mlogloss:0.09629 eval-mlogloss:0.07095 [24] train-mlogloss:0.09216 eval-mlogloss:0.06612 [25] train-mlogloss:0.08970 eval-mlogloss:0.06208 [26] train-mlogloss:0.08583 eval-mlogloss:0.05605 [27] train-mlogloss:0.08396 eval-mlogloss:0.05270 [28] train-mlogloss:0.08268 eval-mlogloss:0.05090 [29] train-mlogloss:0.08164 eval-mlogloss:0.04912 [30] train-mlogloss:0.08056 eval-mlogloss:0.04865 [31] train-mlogloss:0.07963 eval-mlogloss:0.04788 [32] train-mlogloss:0.07911 eval-mlogloss:0.04593 [33] train-mlogloss:0.07824 eval-mlogloss:0.04671 [34] train-mlogloss:0.07732 eval-mlogloss:0.04612 [35] train-mlogloss:0.07616 eval-mlogloss:0.04317 [36] train-mlogloss:0.07545 eval-mlogloss:0.04407 [37] train-mlogloss:0.07422 eval-mlogloss:0.04401 [38] train-mlogloss:0.07398 eval-mlogloss:0.04463 [39] train-mlogloss:0.07328 eval-mlogloss:0.04474 [40] train-mlogloss:0.07228 eval-mlogloss:0.04562 [41] train-mlogloss:0.07145 eval-mlogloss:0.04541 [42] train-mlogloss:0.07105 eval-mlogloss:0.04528 [43] train-mlogloss:0.07085 eval-mlogloss:0.04528 [44] train-mlogloss:0.06971 eval-mlogloss:0.04411 [45] train-mlogloss:0.06893 eval-mlogloss:0.04197 [46] train-mlogloss:0.06862 eval-mlogloss:0.04085 [47] train-mlogloss:0.06797 eval-mlogloss:0.03993 [48] train-mlogloss:0.06713 eval-mlogloss:0.03905 [49] train-mlogloss:0.06685 eval-mlogloss:0.03817 [50] train-mlogloss:0.06654 eval-mlogloss:0.03841 [51] train-mlogloss:0.06632 eval-mlogloss:0.03858 [52] train-mlogloss:0.06606 eval-mlogloss:0.03838 [53] train-mlogloss:0.06569 eval-mlogloss:0.03800 [54] train-mlogloss:0.06527 eval-mlogloss:0.03629 [55] train-mlogloss:0.06496 eval-mlogloss:0.03640 [56] train-mlogloss:0.06499 eval-mlogloss:0.03621 [57] train-mlogloss:0.06512 eval-mlogloss:0.03610 [58] train-mlogloss:0.06516 eval-mlogloss:0.03613 [59] train-mlogloss:0.06511 eval-mlogloss:0.03663 [60] train-mlogloss:0.06470 eval-mlogloss:0.03544 [61] train-mlogloss:0.06448 eval-mlogloss:0.03654 [62] train-mlogloss:0.06401 eval-mlogloss:0.03557 [63] train-mlogloss:0.06414 eval-mlogloss:0.03559 [64] train-mlogloss:0.06359 eval-mlogloss:0.03351 [65] train-mlogloss:0.06325 eval-mlogloss:0.03325 [66] train-mlogloss:0.06336 eval-mlogloss:0.03342 [67] train-mlogloss:0.06301 eval-mlogloss:0.03316 [68] train-mlogloss:0.06302 eval-mlogloss:0.03346 [69] train-mlogloss:0.06291 eval-mlogloss:0.03350 [70] train-mlogloss:0.06224 eval-mlogloss:0.03468 [71] train-mlogloss:0.06178 eval-mlogloss:0.03439 [72] train-mlogloss:0.06156 eval-mlogloss:0.03476 [73] train-mlogloss:0.06134 eval-mlogloss:0.03495 [74] train-mlogloss:0.06150 eval-mlogloss:0.03415 [75] train-mlogloss:0.06115 eval-mlogloss:0.03253 [76] train-mlogloss:0.06100 eval-mlogloss:0.03263 [77] train-mlogloss:0.06102 eval-mlogloss:0.03228 [78] train-mlogloss:0.06080 eval-mlogloss:0.03207 [79] train-mlogloss:0.06056 eval-mlogloss:0.03246 [80] train-mlogloss:0.06044 eval-mlogloss:0.03229 [81] train-mlogloss:0.06022 eval-mlogloss:0.03313 [82] train-mlogloss:0.06008 eval-mlogloss:0.03326 [83] train-mlogloss:0.06004 eval-mlogloss:0.03345 [84] train-mlogloss:0.05996 eval-mlogloss:0.03323 [85] train-mlogloss:0.05955 eval-mlogloss:0.03246 [86] train-mlogloss:0.05925 eval-mlogloss:0.03280 [87] train-mlogloss:0.05909 eval-mlogloss:0.03275 [88] train-mlogloss:0.05896 eval-mlogloss:0.03241 [89] train-mlogloss:0.05879 eval-mlogloss:0.03225 [90] train-mlogloss:0.05834 eval-mlogloss:0.03203 [91] train-mlogloss:0.05828 eval-mlogloss:0.03162 [92] train-mlogloss:0.05818 eval-mlogloss:0.03096 [93] train-mlogloss:0.05815 eval-mlogloss:0.03051 [94] train-mlogloss:0.05798 eval-mlogloss:0.03016 [95] train-mlogloss:0.05789 eval-mlogloss:0.02957 [96] train-mlogloss:0.05784 eval-mlogloss:0.02916 [97] train-mlogloss:0.05775 eval-mlogloss:0.02916 [98] train-mlogloss:0.05747 eval-mlogloss:0.02871 [99] train-mlogloss:0.05730 eval-mlogloss:0.02881 [100] train-mlogloss:0.05715 eval-mlogloss:0.02872 [101] train-mlogloss:0.05726 eval-mlogloss:0.02866 [102] train-mlogloss:0.05719 eval-mlogloss:0.02840 [103] train-mlogloss:0.05691 eval-mlogloss:0.02791 [104] train-mlogloss:0.05667 eval-mlogloss:0.02891 [105] train-mlogloss:0.05678 eval-mlogloss:0.02917 [106] train-mlogloss:0.05679 eval-mlogloss:0.02963 [107] train-mlogloss:0.05659 eval-mlogloss:0.02946 [108] train-mlogloss:0.05654 eval-mlogloss:0.02967 [109] train-mlogloss:0.05646 eval-mlogloss:0.03048 [110] train-mlogloss:0.05648 eval-mlogloss:0.03075 [111] train-mlogloss:0.05637 eval-mlogloss:0.03131 [112] train-mlogloss:0.05635 eval-mlogloss:0.03235 [113] train-mlogloss:0.05617 eval-mlogloss:0.03128 [114] train-mlogloss:0.05587 eval-mlogloss:0.03095 [115] train-mlogloss:0.05573 eval-mlogloss:0.03165 [116] train-mlogloss:0.05548 eval-mlogloss:0.03126 [117] train-mlogloss:0.05533 eval-mlogloss:0.03190 [118] train-mlogloss:0.05536 eval-mlogloss:0.03177 [119] train-mlogloss:0.05522 eval-mlogloss:0.03085 [120] train-mlogloss:0.05520 eval-mlogloss:0.03103 [121] train-mlogloss:0.05504 eval-mlogloss:0.03074 [122] train-mlogloss:0.05500 eval-mlogloss:0.03076 [123] train-mlogloss:0.05489 eval-mlogloss:0.03057 [124] train-mlogloss:0.05484 eval-mlogloss:0.02980 [125] train-mlogloss:0.05483 eval-mlogloss:0.02968 [126] train-mlogloss:0.05474 eval-mlogloss:0.02943 [127] train-mlogloss:0.05470 eval-mlogloss:0.02922 [128] train-mlogloss:0.05467 eval-mlogloss:0.02957 [129] train-mlogloss:0.05460 eval-mlogloss:0.02926 [130] train-mlogloss:0.05442 eval-mlogloss:0.02887 [131] train-mlogloss:0.05442 eval-mlogloss:0.02854 [132] train-mlogloss:0.05418 eval-mlogloss:0.02867 [133] train-mlogloss:0.05418 eval-mlogloss:0.02818
[I 2024-09-27 22:42:21,021] Trial 1 finished with value: 1.0 and parameters: {'lambda': 0.0016709893857175513, 'alpha': 0.00014757212853145138, 'eta': 0.15496350516629734, 'gamma': 0.0022078476643735685, 'max_depth': 6, 'min_child_weight': 2, 'subsample': 0.7491337982961688, 'colsample_bytree': 0.5099145375994963}. Best is trial 0 with value: 1.0.
[0] train-mlogloss:0.93719 eval-mlogloss:0.92571 [1] train-mlogloss:0.81869 eval-mlogloss:0.80996 [2] train-mlogloss:0.69337 eval-mlogloss:0.67145 [3] train-mlogloss:0.59399 eval-mlogloss:0.56200 [4] train-mlogloss:0.53220 eval-mlogloss:0.50205 [5] train-mlogloss:0.46387 eval-mlogloss:0.42918 [6] train-mlogloss:0.41033 eval-mlogloss:0.37047 [7] train-mlogloss:0.37411 eval-mlogloss:0.33378 [8] train-mlogloss:0.34242 eval-mlogloss:0.29856 [9] train-mlogloss:0.31196 eval-mlogloss:0.26527 [10] train-mlogloss:0.28827 eval-mlogloss:0.23762 [11] train-mlogloss:0.26991 eval-mlogloss:0.21645 [12] train-mlogloss:0.25296 eval-mlogloss:0.19739 [13] train-mlogloss:0.24962 eval-mlogloss:0.19265 [14] train-mlogloss:0.24272 eval-mlogloss:0.18520 [15] train-mlogloss:0.24229 eval-mlogloss:0.18535 [16] train-mlogloss:0.23608 eval-mlogloss:0.17787 [17] train-mlogloss:0.23081 eval-mlogloss:0.17070 [18] train-mlogloss:0.23034 eval-mlogloss:0.17019 [19] train-mlogloss:0.22975 eval-mlogloss:0.16984 [20] train-mlogloss:0.22446 eval-mlogloss:0.16329 [21] train-mlogloss:0.22402 eval-mlogloss:0.16151 [22] train-mlogloss:0.22329 eval-mlogloss:0.16108 [23] train-mlogloss:0.22277 eval-mlogloss:0.16083 [24] train-mlogloss:0.22230 eval-mlogloss:0.16067 [25] train-mlogloss:0.21811 eval-mlogloss:0.15378 [26] train-mlogloss:0.21795 eval-mlogloss:0.15357 [27] train-mlogloss:0.21740 eval-mlogloss:0.15319 [28] train-mlogloss:0.21708 eval-mlogloss:0.15316 [29] train-mlogloss:0.21688 eval-mlogloss:0.15238 [30] train-mlogloss:0.21637 eval-mlogloss:0.15202 [31] train-mlogloss:0.21632 eval-mlogloss:0.15189 [32] train-mlogloss:0.21628 eval-mlogloss:0.15186 [33] train-mlogloss:0.21600 eval-mlogloss:0.15061 [34] train-mlogloss:0.21510 eval-mlogloss:0.14981 [35] train-mlogloss:0.21491 eval-mlogloss:0.15007 [36] train-mlogloss:0.21476 eval-mlogloss:0.14991 [37] train-mlogloss:0.21340 eval-mlogloss:0.14804 [38] train-mlogloss:0.21303 eval-mlogloss:0.14765 [39] train-mlogloss:0.21296 eval-mlogloss:0.14747 [40] train-mlogloss:0.21292 eval-mlogloss:0.14725 [41] train-mlogloss:0.21259 eval-mlogloss:0.14710 [42] train-mlogloss:0.21235 eval-mlogloss:0.14691 [43] train-mlogloss:0.21209 eval-mlogloss:0.14680 [44] train-mlogloss:0.21188 eval-mlogloss:0.14657 [45] train-mlogloss:0.21173 eval-mlogloss:0.14627 [46] train-mlogloss:0.21156 eval-mlogloss:0.14613 [47] train-mlogloss:0.21116 eval-mlogloss:0.14594 [48] train-mlogloss:0.21076 eval-mlogloss:0.14583 [49] train-mlogloss:0.20681 eval-mlogloss:0.14040 [50] train-mlogloss:0.20690 eval-mlogloss:0.14058 [51] train-mlogloss:0.20677 eval-mlogloss:0.14042 [52] train-mlogloss:0.20669 eval-mlogloss:0.14035 [53] train-mlogloss:0.20675 eval-mlogloss:0.14031 [54] train-mlogloss:0.20662 eval-mlogloss:0.14022 [55] train-mlogloss:0.20632 eval-mlogloss:0.14003 [56] train-mlogloss:0.20618 eval-mlogloss:0.14020 [57] train-mlogloss:0.20610 eval-mlogloss:0.14001 [58] train-mlogloss:0.20613 eval-mlogloss:0.13998 [59] train-mlogloss:0.20596 eval-mlogloss:0.13988 [60] train-mlogloss:0.20587 eval-mlogloss:0.13971 [61] train-mlogloss:0.20582 eval-mlogloss:0.13961 [62] train-mlogloss:0.20556 eval-mlogloss:0.13805 [63] train-mlogloss:0.20534 eval-mlogloss:0.13806 [64] train-mlogloss:0.20526 eval-mlogloss:0.13824 [65] train-mlogloss:0.20483 eval-mlogloss:0.13775 [66] train-mlogloss:0.20454 eval-mlogloss:0.13748 [67] train-mlogloss:0.20455 eval-mlogloss:0.13750 [68] train-mlogloss:0.20441 eval-mlogloss:0.13734 [69] train-mlogloss:0.20432 eval-mlogloss:0.13754 [70] train-mlogloss:0.20433 eval-mlogloss:0.13761 [71] train-mlogloss:0.20426 eval-mlogloss:0.13723 [72] train-mlogloss:0.20409 eval-mlogloss:0.13729 [73] train-mlogloss:0.20411 eval-mlogloss:0.13740 [74] train-mlogloss:0.20405 eval-mlogloss:0.13766 [75] train-mlogloss:0.20396 eval-mlogloss:0.13698 [76] train-mlogloss:0.20393 eval-mlogloss:0.13673 [77] train-mlogloss:0.20390 eval-mlogloss:0.13654 [78] train-mlogloss:0.20370 eval-mlogloss:0.13630 [79] train-mlogloss:0.20364 eval-mlogloss:0.13595 [80] train-mlogloss:0.20346 eval-mlogloss:0.13585 [81] train-mlogloss:0.20327 eval-mlogloss:0.13580 [82] train-mlogloss:0.20297 eval-mlogloss:0.13570 [83] train-mlogloss:0.20297 eval-mlogloss:0.13566 [84] train-mlogloss:0.20288 eval-mlogloss:0.13559 [85] train-mlogloss:0.20279 eval-mlogloss:0.13579 [86] train-mlogloss:0.20259 eval-mlogloss:0.13566 [87] train-mlogloss:0.20251 eval-mlogloss:0.13587 [88] train-mlogloss:0.20227 eval-mlogloss:0.13580 [89] train-mlogloss:0.20219 eval-mlogloss:0.13594 [90] train-mlogloss:0.20185 eval-mlogloss:0.13561 [91] train-mlogloss:0.20187 eval-mlogloss:0.13562 [92] train-mlogloss:0.20184 eval-mlogloss:0.13559 [93] train-mlogloss:0.20179 eval-mlogloss:0.13568 [94] train-mlogloss:0.20159 eval-mlogloss:0.13558 [95] train-mlogloss:0.20158 eval-mlogloss:0.13566 [96] train-mlogloss:0.20144 eval-mlogloss:0.13547 [97] train-mlogloss:0.20142 eval-mlogloss:0.13573 [98] train-mlogloss:0.20143 eval-mlogloss:0.13585 [99] train-mlogloss:0.20126 eval-mlogloss:0.13572 [100] train-mlogloss:0.20118 eval-mlogloss:0.13594 [101] train-mlogloss:0.20119 eval-mlogloss:0.13590 [102] train-mlogloss:0.20103 eval-mlogloss:0.13573 [103] train-mlogloss:0.20091 eval-mlogloss:0.13561 [104] train-mlogloss:0.20087 eval-mlogloss:0.13576 [105] train-mlogloss:0.20081 eval-mlogloss:0.13595 [106] train-mlogloss:0.20081 eval-mlogloss:0.13600 [107] train-mlogloss:0.20060 eval-mlogloss:0.13581 [108] train-mlogloss:0.20047 eval-mlogloss:0.13559 [109] train-mlogloss:0.20049 eval-mlogloss:0.13568 [110] train-mlogloss:0.20049 eval-mlogloss:0.13565 [111] train-mlogloss:0.20048 eval-mlogloss:0.13583 [112] train-mlogloss:0.20053 eval-mlogloss:0.13607 [113] train-mlogloss:0.20046 eval-mlogloss:0.13606 [114] train-mlogloss:0.20030 eval-mlogloss:0.13579 [115] train-mlogloss:0.20028 eval-mlogloss:0.13576 [116] train-mlogloss:0.20027 eval-mlogloss:0.13579 [117] train-mlogloss:0.20014 eval-mlogloss:0.13574 [118] train-mlogloss:0.20010 eval-mlogloss:0.13569 [119] train-mlogloss:0.20009 eval-mlogloss:0.13550 [120] train-mlogloss:0.20001 eval-mlogloss:0.13543 [121] train-mlogloss:0.20003 eval-mlogloss:0.13550 [122] train-mlogloss:0.19999 eval-mlogloss:0.13545 [123] train-mlogloss:0.19999 eval-mlogloss:0.13544 [124] train-mlogloss:0.19997 eval-mlogloss:0.13553 [125] train-mlogloss:0.19996 eval-mlogloss:0.13550 [126] train-mlogloss:0.19982 eval-mlogloss:0.13574 [127] train-mlogloss:0.19977 eval-mlogloss:0.13574 [128] train-mlogloss:0.19962 eval-mlogloss:0.13575 [129] train-mlogloss:0.19965 eval-mlogloss:0.13573 [130] train-mlogloss:0.19952 eval-mlogloss:0.13561 [131] train-mlogloss:0.19946 eval-mlogloss:0.13576 [132] train-mlogloss:0.19945 eval-mlogloss:0.13578 [133] train-mlogloss:0.19944 eval-mlogloss:0.13560 [134] train-mlogloss:0.19938 eval-mlogloss:0.13590 [135] train-mlogloss:0.19936 eval-mlogloss:0.13592 [136] train-mlogloss:0.19939 eval-mlogloss:0.13583 [137] train-mlogloss:0.19933 eval-mlogloss:0.13547 [138] train-mlogloss:0.19911 eval-mlogloss:0.13544 [139] train-mlogloss:0.19912 eval-mlogloss:0.13545 [140] train-mlogloss:0.19913 eval-mlogloss:0.13540 [141] train-mlogloss:0.19904 eval-mlogloss:0.13534 [142] train-mlogloss:0.19889 eval-mlogloss:0.13512 [143] train-mlogloss:0.19880 eval-mlogloss:0.13418 [144] train-mlogloss:0.19879 eval-mlogloss:0.13430 [145] train-mlogloss:0.19874 eval-mlogloss:0.13422 [146] train-mlogloss:0.19870 eval-mlogloss:0.13418 [147] train-mlogloss:0.19871 eval-mlogloss:0.13408 [148] train-mlogloss:0.19865 eval-mlogloss:0.13398 [149] train-mlogloss:0.19855 eval-mlogloss:0.13395 [150] train-mlogloss:0.19855 eval-mlogloss:0.13389 [151] train-mlogloss:0.19853 eval-mlogloss:0.13398 [152] train-mlogloss:0.19848 eval-mlogloss:0.13420 [153] train-mlogloss:0.19841 eval-mlogloss:0.13412 [154] train-mlogloss:0.19841 eval-mlogloss:0.13420 [155] train-mlogloss:0.19840 eval-mlogloss:0.13419 [156] train-mlogloss:0.19834 eval-mlogloss:0.13403 [157] train-mlogloss:0.19815 eval-mlogloss:0.13391 [158] train-mlogloss:0.19815 eval-mlogloss:0.13392 [159] train-mlogloss:0.19802 eval-mlogloss:0.13371 [160] train-mlogloss:0.19804 eval-mlogloss:0.13383 [161] train-mlogloss:0.19796 eval-mlogloss:0.13309 [162] train-mlogloss:0.19791 eval-mlogloss:0.13307 [163] train-mlogloss:0.19788 eval-mlogloss:0.13302 [164] train-mlogloss:0.19789 eval-mlogloss:0.13287 [165] train-mlogloss:0.19789 eval-mlogloss:0.13284 [166] train-mlogloss:0.19782 eval-mlogloss:0.13286 [167] train-mlogloss:0.19780 eval-mlogloss:0.13293 [168] train-mlogloss:0.19779 eval-mlogloss:0.13291 [169] train-mlogloss:0.19774 eval-mlogloss:0.13291 [170] train-mlogloss:0.19761 eval-mlogloss:0.13277 [171] train-mlogloss:0.19745 eval-mlogloss:0.13269 [172] train-mlogloss:0.19752 eval-mlogloss:0.13258 [173] train-mlogloss:0.19757 eval-mlogloss:0.13255 [174] train-mlogloss:0.19755 eval-mlogloss:0.13268 [175] train-mlogloss:0.19755 eval-mlogloss:0.13275 [176] train-mlogloss:0.19753 eval-mlogloss:0.13252 [177] train-mlogloss:0.19753 eval-mlogloss:0.13249 [178] train-mlogloss:0.19752 eval-mlogloss:0.13247 [179] train-mlogloss:0.19752 eval-mlogloss:0.13249 [180] train-mlogloss:0.19751 eval-mlogloss:0.13235 [181] train-mlogloss:0.19753 eval-mlogloss:0.13226 [182] train-mlogloss:0.19751 eval-mlogloss:0.13224 [183] train-mlogloss:0.19749 eval-mlogloss:0.13226 [184] train-mlogloss:0.19745 eval-mlogloss:0.13178 [185] train-mlogloss:0.19746 eval-mlogloss:0.13176 [186] train-mlogloss:0.19746 eval-mlogloss:0.13159 [187] train-mlogloss:0.19744 eval-mlogloss:0.13169 [188] train-mlogloss:0.19742 eval-mlogloss:0.13170 [189] train-mlogloss:0.19740 eval-mlogloss:0.13159 [190] train-mlogloss:0.19737 eval-mlogloss:0.13170 [191] train-mlogloss:0.19729 eval-mlogloss:0.13158 [192] train-mlogloss:0.19731 eval-mlogloss:0.13146 [193] train-mlogloss:0.19735 eval-mlogloss:0.13136 [194] train-mlogloss:0.19732 eval-mlogloss:0.13133 [195] train-mlogloss:0.19735 eval-mlogloss:0.13126 [196] train-mlogloss:0.19734 eval-mlogloss:0.13140 [197] train-mlogloss:0.19725 eval-mlogloss:0.13138 [198] train-mlogloss:0.19726 eval-mlogloss:0.13140 [199] train-mlogloss:0.19726 eval-mlogloss:0.13134 [200] train-mlogloss:0.19716 eval-mlogloss:0.13155 [201] train-mlogloss:0.19714 eval-mlogloss:0.13150 [202] train-mlogloss:0.19714 eval-mlogloss:0.13145 [203] train-mlogloss:0.19714 eval-mlogloss:0.13144 [204] train-mlogloss:0.19715 eval-mlogloss:0.13126 [205] train-mlogloss:0.19717 eval-mlogloss:0.13124 [206] train-mlogloss:0.19719 eval-mlogloss:0.13118 [207] train-mlogloss:0.19704 eval-mlogloss:0.13112 [208] train-mlogloss:0.19706 eval-mlogloss:0.13110 [209] train-mlogloss:0.19703 eval-mlogloss:0.13124 [210] train-mlogloss:0.19702 eval-mlogloss:0.13124 [211] train-mlogloss:0.19702 eval-mlogloss:0.13122 [212] train-mlogloss:0.19703 eval-mlogloss:0.13131 [213] train-mlogloss:0.19703 eval-mlogloss:0.13133 [214] train-mlogloss:0.19696 eval-mlogloss:0.13122 [215] train-mlogloss:0.19689 eval-mlogloss:0.13097 [216] train-mlogloss:0.19692 eval-mlogloss:0.13088 [217] train-mlogloss:0.19691 eval-mlogloss:0.13086 [218] train-mlogloss:0.19683 eval-mlogloss:0.13077 [219] train-mlogloss:0.19683 eval-mlogloss:0.13075 [220] train-mlogloss:0.19684 eval-mlogloss:0.13057 [221] train-mlogloss:0.19683 eval-mlogloss:0.13076 [222] train-mlogloss:0.19683 eval-mlogloss:0.13084 [223] train-mlogloss:0.19688 eval-mlogloss:0.13071 [224] train-mlogloss:0.19689 eval-mlogloss:0.13064 [225] train-mlogloss:0.19689 eval-mlogloss:0.13061 [226] train-mlogloss:0.19689 eval-mlogloss:0.13084 [227] train-mlogloss:0.19685 eval-mlogloss:0.13084 [228] train-mlogloss:0.19670 eval-mlogloss:0.13087 [229] train-mlogloss:0.19667 eval-mlogloss:0.13084 [230] train-mlogloss:0.19667 eval-mlogloss:0.13082 [231] train-mlogloss:0.19668 eval-mlogloss:0.13072 [232] train-mlogloss:0.19667 eval-mlogloss:0.13076 [233] train-mlogloss:0.19671 eval-mlogloss:0.13135 [234] train-mlogloss:0.19671 eval-mlogloss:0.13141 [235] train-mlogloss:0.19672 eval-mlogloss:0.13149 [236] train-mlogloss:0.19672 eval-mlogloss:0.13147 [237] train-mlogloss:0.19671 eval-mlogloss:0.13136 [238] train-mlogloss:0.19675 eval-mlogloss:0.13188 [239] train-mlogloss:0.19678 eval-mlogloss:0.13211 [240] train-mlogloss:0.19651 eval-mlogloss:0.13192 [241] train-mlogloss:0.19650 eval-mlogloss:0.13197 [242] train-mlogloss:0.19642 eval-mlogloss:0.13182 [243] train-mlogloss:0.19644 eval-mlogloss:0.13175 [244] train-mlogloss:0.19642 eval-mlogloss:0.13165 [245] train-mlogloss:0.19642 eval-mlogloss:0.13179 [246] train-mlogloss:0.19640 eval-mlogloss:0.13195 [247] train-mlogloss:0.19638 eval-mlogloss:0.13188 [248] train-mlogloss:0.19639 eval-mlogloss:0.13183 [249] train-mlogloss:0.19638 eval-mlogloss:0.13196 [250] train-mlogloss:0.19640 eval-mlogloss:0.13182
[I 2024-09-27 22:42:22,876] Trial 2 finished with value: 1.0 and parameters: {'lambda': 0.012365904025869469, 'alpha': 1.0954652193851477e-07, 'eta': 0.15791282054069913, 'gamma': 1.8418342851933931e-06, 'max_depth': 9, 'min_child_weight': 9, 'subsample': 0.8305932262730233, 'colsample_bytree': 0.6545524663433488}. Best is trial 0 with value: 1.0.
[0] train-mlogloss:0.98697 eval-mlogloss:0.98948 [1] train-mlogloss:0.89722 eval-mlogloss:0.89764 [2] train-mlogloss:0.80437 eval-mlogloss:0.79908 [3] train-mlogloss:0.72115 eval-mlogloss:0.70965 [4] train-mlogloss:0.66028 eval-mlogloss:0.65181 [5] train-mlogloss:0.59755 eval-mlogloss:0.58555 [6] train-mlogloss:0.54287 eval-mlogloss:0.53189 [7] train-mlogloss:0.49916 eval-mlogloss:0.48769 [8] train-mlogloss:0.45887 eval-mlogloss:0.46229 [9] train-mlogloss:0.41843 eval-mlogloss:0.41909 [10] train-mlogloss:0.38325 eval-mlogloss:0.38087 [11] train-mlogloss:0.35189 eval-mlogloss:0.34909 [12] train-mlogloss:0.32739 eval-mlogloss:0.32586 [13] train-mlogloss:0.30722 eval-mlogloss:0.30283 [14] train-mlogloss:0.28449 eval-mlogloss:0.27541 [15] train-mlogloss:0.26494 eval-mlogloss:0.25360 [16] train-mlogloss:0.24589 eval-mlogloss:0.23605 [17] train-mlogloss:0.23186 eval-mlogloss:0.22356 [18] train-mlogloss:0.21637 eval-mlogloss:0.20700 [19] train-mlogloss:0.20520 eval-mlogloss:0.19905 [20] train-mlogloss:0.19213 eval-mlogloss:0.18216 [21] train-mlogloss:0.18575 eval-mlogloss:0.17420 [22] train-mlogloss:0.17615 eval-mlogloss:0.16237 [23] train-mlogloss:0.16545 eval-mlogloss:0.14937 [24] train-mlogloss:0.15423 eval-mlogloss:0.14053 [25] train-mlogloss:0.14557 eval-mlogloss:0.12874 [26] train-mlogloss:0.13762 eval-mlogloss:0.11870 [27] train-mlogloss:0.13170 eval-mlogloss:0.11053 [28] train-mlogloss:0.12626 eval-mlogloss:0.10393 [29] train-mlogloss:0.11960 eval-mlogloss:0.09663 [30] train-mlogloss:0.11494 eval-mlogloss:0.09236 [31] train-mlogloss:0.11054 eval-mlogloss:0.08691 [32] train-mlogloss:0.10630 eval-mlogloss:0.08249 [33] train-mlogloss:0.10248 eval-mlogloss:0.07716 [34] train-mlogloss:0.09905 eval-mlogloss:0.07400 [35] train-mlogloss:0.09706 eval-mlogloss:0.07133 [36] train-mlogloss:0.09419 eval-mlogloss:0.06823 [37] train-mlogloss:0.09236 eval-mlogloss:0.06596 [38] train-mlogloss:0.09089 eval-mlogloss:0.06422 [39] train-mlogloss:0.08907 eval-mlogloss:0.06371 [40] train-mlogloss:0.08689 eval-mlogloss:0.06222 [41] train-mlogloss:0.08508 eval-mlogloss:0.06058 [42] train-mlogloss:0.08415 eval-mlogloss:0.05916 [43] train-mlogloss:0.08324 eval-mlogloss:0.05890 [44] train-mlogloss:0.08252 eval-mlogloss:0.05721 [45] train-mlogloss:0.08082 eval-mlogloss:0.05380 [46] train-mlogloss:0.07864 eval-mlogloss:0.05183 [47] train-mlogloss:0.07747 eval-mlogloss:0.04974 [48] train-mlogloss:0.07623 eval-mlogloss:0.04893 [49] train-mlogloss:0.07552 eval-mlogloss:0.04735 [50] train-mlogloss:0.07569 eval-mlogloss:0.04808 [51] train-mlogloss:0.07492 eval-mlogloss:0.04792 [52] train-mlogloss:0.07404 eval-mlogloss:0.04708 [53] train-mlogloss:0.07346 eval-mlogloss:0.04672 [54] train-mlogloss:0.07287 eval-mlogloss:0.04660 [55] train-mlogloss:0.07203 eval-mlogloss:0.04705 [56] train-mlogloss:0.07147 eval-mlogloss:0.04541 [57] train-mlogloss:0.07130 eval-mlogloss:0.04416 [58] train-mlogloss:0.07144 eval-mlogloss:0.04415 [59] train-mlogloss:0.07072 eval-mlogloss:0.04350 [60] train-mlogloss:0.06963 eval-mlogloss:0.04192 [61] train-mlogloss:0.06888 eval-mlogloss:0.04110 [62] train-mlogloss:0.06844 eval-mlogloss:0.04142 [63] train-mlogloss:0.06794 eval-mlogloss:0.04057
[I 2024-09-27 22:42:23,289] Trial 3 pruned. Trial was pruned at iteration 64.
[0] train-mlogloss:1.07206 eval-mlogloss:1.07171 [1] train-mlogloss:1.04865 eval-mlogloss:1.05066 [2] train-mlogloss:1.02856 eval-mlogloss:1.02940 [3] train-mlogloss:1.00436 eval-mlogloss:1.00496 [4] train-mlogloss:0.98939 eval-mlogloss:0.99270 [5] train-mlogloss:0.97402 eval-mlogloss:0.97881 [6] train-mlogloss:0.95848 eval-mlogloss:0.96516 [7] train-mlogloss:0.93524 eval-mlogloss:0.94028 [8] train-mlogloss:0.91447 eval-mlogloss:0.91931 [9] train-mlogloss:0.90717 eval-mlogloss:0.91463 [10] train-mlogloss:0.88356 eval-mlogloss:0.89166 [11] train-mlogloss:0.86689 eval-mlogloss:0.87588 [12] train-mlogloss:0.84657 eval-mlogloss:0.85429 [13] train-mlogloss:0.83089 eval-mlogloss:0.83677 [14] train-mlogloss:0.80677 eval-mlogloss:0.81155 [15] train-mlogloss:0.79390 eval-mlogloss:0.79835 [16] train-mlogloss:0.77445 eval-mlogloss:0.77883 [17] train-mlogloss:0.75664 eval-mlogloss:0.76111 [18] train-mlogloss:0.73907 eval-mlogloss:0.74402 [19] train-mlogloss:0.72365 eval-mlogloss:0.72822 [20] train-mlogloss:0.70808 eval-mlogloss:0.71271 [21] train-mlogloss:0.70051 eval-mlogloss:0.70560 [22] train-mlogloss:0.69609 eval-mlogloss:0.70156 [23] train-mlogloss:0.67702 eval-mlogloss:0.68075 [24] train-mlogloss:0.67020 eval-mlogloss:0.67533 [25] train-mlogloss:0.65626 eval-mlogloss:0.65989 [26] train-mlogloss:0.64140 eval-mlogloss:0.64515 [27] train-mlogloss:0.63038 eval-mlogloss:0.63208 [28] train-mlogloss:0.61836 eval-mlogloss:0.61992 [29] train-mlogloss:0.60801 eval-mlogloss:0.60888 [30] train-mlogloss:0.59571 eval-mlogloss:0.59712 [31] train-mlogloss:0.58492 eval-mlogloss:0.58593 [32] train-mlogloss:0.56922 eval-mlogloss:0.56927 [33] train-mlogloss:0.55919 eval-mlogloss:0.55919 [34] train-mlogloss:0.54633 eval-mlogloss:0.54468 [35] train-mlogloss:0.53515 eval-mlogloss:0.53267 [36] train-mlogloss:0.52926 eval-mlogloss:0.52736 [37] train-mlogloss:0.52495 eval-mlogloss:0.52377 [38] train-mlogloss:0.51854 eval-mlogloss:0.51765 [39] train-mlogloss:0.51301 eval-mlogloss:0.51280 [40] train-mlogloss:0.50417 eval-mlogloss:0.50349 [41] train-mlogloss:0.49339 eval-mlogloss:0.49170 [42] train-mlogloss:0.48452 eval-mlogloss:0.48324 [43] train-mlogloss:0.47950 eval-mlogloss:0.47939 [44] train-mlogloss:0.46768 eval-mlogloss:0.46627 [45] train-mlogloss:0.45970 eval-mlogloss:0.45773 [46] train-mlogloss:0.45275 eval-mlogloss:0.45103 [47] train-mlogloss:0.44853 eval-mlogloss:0.44768 [48] train-mlogloss:0.44044 eval-mlogloss:0.43909 [49] train-mlogloss:0.43498 eval-mlogloss:0.43345 [50] train-mlogloss:0.43083 eval-mlogloss:0.43012 [51] train-mlogloss:0.42486 eval-mlogloss:0.42441 [52] train-mlogloss:0.41591 eval-mlogloss:0.41413 [53] train-mlogloss:0.40833 eval-mlogloss:0.40622 [54] train-mlogloss:0.40186 eval-mlogloss:0.39925 [55] train-mlogloss:0.39893 eval-mlogloss:0.39714 [56] train-mlogloss:0.39097 eval-mlogloss:0.38760 [57] train-mlogloss:0.38472 eval-mlogloss:0.38177 [58] train-mlogloss:0.38205 eval-mlogloss:0.37929 [59] train-mlogloss:0.37602 eval-mlogloss:0.37302 [60] train-mlogloss:0.37378 eval-mlogloss:0.37112 [61] train-mlogloss:0.36814 eval-mlogloss:0.36542 [62] train-mlogloss:0.36621 eval-mlogloss:0.36376 [63] train-mlogloss:0.35900 eval-mlogloss:0.35573 [64] train-mlogloss:0.35085 eval-mlogloss:0.34609 [65] train-mlogloss:0.34919 eval-mlogloss:0.34523 [66] train-mlogloss:0.34490 eval-mlogloss:0.34065 [67] train-mlogloss:0.33842 eval-mlogloss:0.33242 [68] train-mlogloss:0.33497 eval-mlogloss:0.33000 [69] train-mlogloss:0.33074 eval-mlogloss:0.32538 [70] train-mlogloss:0.32624 eval-mlogloss:0.32081 [71] train-mlogloss:0.32442 eval-mlogloss:0.31943 [72] train-mlogloss:0.31849 eval-mlogloss:0.31247 [73] train-mlogloss:0.31564 eval-mlogloss:0.30980 [74] train-mlogloss:0.31012 eval-mlogloss:0.30321 [75] train-mlogloss:0.30758 eval-mlogloss:0.30119 [76] train-mlogloss:0.30114 eval-mlogloss:0.29369 [77] train-mlogloss:0.29650 eval-mlogloss:0.28885 [78] train-mlogloss:0.29334 eval-mlogloss:0.28575 [79] train-mlogloss:0.29038 eval-mlogloss:0.28253 [80] train-mlogloss:0.28646 eval-mlogloss:0.27874 [81] train-mlogloss:0.28109 eval-mlogloss:0.27285 [82] train-mlogloss:0.27834 eval-mlogloss:0.27006 [83] train-mlogloss:0.27514 eval-mlogloss:0.26642 [84] train-mlogloss:0.27136 eval-mlogloss:0.26220 [85] train-mlogloss:0.26623 eval-mlogloss:0.25592 [86] train-mlogloss:0.26188 eval-mlogloss:0.25056 [87] train-mlogloss:0.25927 eval-mlogloss:0.24798 [88] train-mlogloss:0.25821 eval-mlogloss:0.24711 [89] train-mlogloss:0.25409 eval-mlogloss:0.24215 [90] train-mlogloss:0.24937 eval-mlogloss:0.23622 [91] train-mlogloss:0.24484 eval-mlogloss:0.23066 [92] train-mlogloss:0.24061 eval-mlogloss:0.22538 [93] train-mlogloss:0.23767 eval-mlogloss:0.22188 [94] train-mlogloss:0.23593 eval-mlogloss:0.21965 [95] train-mlogloss:0.23179 eval-mlogloss:0.21450 [96] train-mlogloss:0.22831 eval-mlogloss:0.21011 [97] train-mlogloss:0.22697 eval-mlogloss:0.20908 [98] train-mlogloss:0.22325 eval-mlogloss:0.20443 [99] train-mlogloss:0.22197 eval-mlogloss:0.20348 [100] train-mlogloss:0.22101 eval-mlogloss:0.20247 [101] train-mlogloss:0.21946 eval-mlogloss:0.20123 [102] train-mlogloss:0.21695 eval-mlogloss:0.19825 [103] train-mlogloss:0.21526 eval-mlogloss:0.19636 [104] train-mlogloss:0.21339 eval-mlogloss:0.19403 [105] train-mlogloss:0.21200 eval-mlogloss:0.19311 [106] train-mlogloss:0.20962 eval-mlogloss:0.19010 [107] train-mlogloss:0.20801 eval-mlogloss:0.18781 [108] train-mlogloss:0.20598 eval-mlogloss:0.18562 [109] train-mlogloss:0.20458 eval-mlogloss:0.18383 [110] train-mlogloss:0.20172 eval-mlogloss:0.18010 [111] train-mlogloss:0.19901 eval-mlogloss:0.17684 [112] train-mlogloss:0.19795 eval-mlogloss:0.17602 [113] train-mlogloss:0.19586 eval-mlogloss:0.17323 [114] train-mlogloss:0.19435 eval-mlogloss:0.17182 [115] train-mlogloss:0.19180 eval-mlogloss:0.16846 [116] train-mlogloss:0.19038 eval-mlogloss:0.16699 [117] train-mlogloss:0.18925 eval-mlogloss:0.16597 [118] train-mlogloss:0.18803 eval-mlogloss:0.16468 [119] train-mlogloss:0.18592 eval-mlogloss:0.16170 [120] train-mlogloss:0.18405 eval-mlogloss:0.15892 [121] train-mlogloss:0.18311 eval-mlogloss:0.15772 [122] train-mlogloss:0.18169 eval-mlogloss:0.15625 [123] train-mlogloss:0.18001 eval-mlogloss:0.15414 [124] train-mlogloss:0.17918 eval-mlogloss:0.15346 [125] train-mlogloss:0.17673 eval-mlogloss:0.15070 [126] train-mlogloss:0.17533 eval-mlogloss:0.14907 [127] train-mlogloss:0.17373 eval-mlogloss:0.14708 [128] train-mlogloss:0.17299 eval-mlogloss:0.14647 [129] train-mlogloss:0.17145 eval-mlogloss:0.14460 [130] train-mlogloss:0.17107 eval-mlogloss:0.14408 [131] train-mlogloss:0.16954 eval-mlogloss:0.14198 [132] train-mlogloss:0.16854 eval-mlogloss:0.14054 [133] train-mlogloss:0.16729 eval-mlogloss:0.13935 [134] train-mlogloss:0.16685 eval-mlogloss:0.13942 [135] train-mlogloss:0.16533 eval-mlogloss:0.13705 [136] train-mlogloss:0.16366 eval-mlogloss:0.13448 [137] train-mlogloss:0.16337 eval-mlogloss:0.13430 [138] train-mlogloss:0.16155 eval-mlogloss:0.13161 [139] train-mlogloss:0.16076 eval-mlogloss:0.13042 [140] train-mlogloss:0.16020 eval-mlogloss:0.12996 [141] train-mlogloss:0.15910 eval-mlogloss:0.12844 [142] train-mlogloss:0.15740 eval-mlogloss:0.12613 [143] train-mlogloss:0.15657 eval-mlogloss:0.12512 [144] train-mlogloss:0.15548 eval-mlogloss:0.12373 [145] train-mlogloss:0.15486 eval-mlogloss:0.12341 [146] train-mlogloss:0.15376 eval-mlogloss:0.12216 [147] train-mlogloss:0.15317 eval-mlogloss:0.12169 [148] train-mlogloss:0.15193 eval-mlogloss:0.12013 [149] train-mlogloss:0.15076 eval-mlogloss:0.11844 [150] train-mlogloss:0.14981 eval-mlogloss:0.11701 [151] train-mlogloss:0.14889 eval-mlogloss:0.11614 [152] train-mlogloss:0.14862 eval-mlogloss:0.11597 [153] train-mlogloss:0.14859 eval-mlogloss:0.11563 [154] train-mlogloss:0.14780 eval-mlogloss:0.11436 [155] train-mlogloss:0.14760 eval-mlogloss:0.11455 [156] train-mlogloss:0.14720 eval-mlogloss:0.11388 [157] train-mlogloss:0.14656 eval-mlogloss:0.11311 [158] train-mlogloss:0.14527 eval-mlogloss:0.11102 [159] train-mlogloss:0.14482 eval-mlogloss:0.11083 [160] train-mlogloss:0.14401 eval-mlogloss:0.10977 [161] train-mlogloss:0.14329 eval-mlogloss:0.10873 [162] train-mlogloss:0.14297 eval-mlogloss:0.10851 [163] train-mlogloss:0.14245 eval-mlogloss:0.10787 [164] train-mlogloss:0.14197 eval-mlogloss:0.10721 [165] train-mlogloss:0.14115 eval-mlogloss:0.10584 [166] train-mlogloss:0.14051 eval-mlogloss:0.10467 [167] train-mlogloss:0.13999 eval-mlogloss:0.10385 [168] train-mlogloss:0.13963 eval-mlogloss:0.10340 [169] train-mlogloss:0.13920 eval-mlogloss:0.10283 [170] train-mlogloss:0.13855 eval-mlogloss:0.10176 [171] train-mlogloss:0.13787 eval-mlogloss:0.10074 [172] train-mlogloss:0.13784 eval-mlogloss:0.10081 [173] train-mlogloss:0.13775 eval-mlogloss:0.10080 [174] train-mlogloss:0.13730 eval-mlogloss:0.10013 [175] train-mlogloss:0.13716 eval-mlogloss:0.10026 [176] train-mlogloss:0.13697 eval-mlogloss:0.10021 [177] train-mlogloss:0.13649 eval-mlogloss:0.09995 [178] train-mlogloss:0.13629 eval-mlogloss:0.09963 [179] train-mlogloss:0.13555 eval-mlogloss:0.09835 [180] train-mlogloss:0.13527 eval-mlogloss:0.09824 [181] train-mlogloss:0.13465 eval-mlogloss:0.09725 [182] train-mlogloss:0.13422 eval-mlogloss:0.09658 [183] train-mlogloss:0.13398 eval-mlogloss:0.09640 [184] train-mlogloss:0.13386 eval-mlogloss:0.09618 [185] train-mlogloss:0.13362 eval-mlogloss:0.09597 [186] train-mlogloss:0.13334 eval-mlogloss:0.09567 [187] train-mlogloss:0.13276 eval-mlogloss:0.09463 [188] train-mlogloss:0.13242 eval-mlogloss:0.09420 [189] train-mlogloss:0.13217 eval-mlogloss:0.09407 [190] train-mlogloss:0.13192 eval-mlogloss:0.09363 [191] train-mlogloss:0.13179 eval-mlogloss:0.09344 [192] train-mlogloss:0.13130 eval-mlogloss:0.09240 [193] train-mlogloss:0.13096 eval-mlogloss:0.09208 [194] train-mlogloss:0.13061 eval-mlogloss:0.09189 [195] train-mlogloss:0.13051 eval-mlogloss:0.09176 [196] train-mlogloss:0.12995 eval-mlogloss:0.09076 [197] train-mlogloss:0.12977 eval-mlogloss:0.09047 [198] train-mlogloss:0.12948 eval-mlogloss:0.08995 [199] train-mlogloss:0.12916 eval-mlogloss:0.08945 [200] train-mlogloss:0.12884 eval-mlogloss:0.08885 [201] train-mlogloss:0.12841 eval-mlogloss:0.08810 [202] train-mlogloss:0.12811 eval-mlogloss:0.08758 [203] train-mlogloss:0.12778 eval-mlogloss:0.08695 [204] train-mlogloss:0.12773 eval-mlogloss:0.08684 [205] train-mlogloss:0.12729 eval-mlogloss:0.08608 [206] train-mlogloss:0.12727 eval-mlogloss:0.08607 [207] train-mlogloss:0.12708 eval-mlogloss:0.08612 [208] train-mlogloss:0.12704 eval-mlogloss:0.08605 [209] train-mlogloss:0.12696 eval-mlogloss:0.08608 [210] train-mlogloss:0.12660 eval-mlogloss:0.08547 [211] train-mlogloss:0.12643 eval-mlogloss:0.08513 [212] train-mlogloss:0.12632 eval-mlogloss:0.08519 [213] train-mlogloss:0.12584 eval-mlogloss:0.08454 [214] train-mlogloss:0.12550 eval-mlogloss:0.08406 [215] train-mlogloss:0.12539 eval-mlogloss:0.08395 [216] train-mlogloss:0.12507 eval-mlogloss:0.08354 [217] train-mlogloss:0.12474 eval-mlogloss:0.08299 [218] train-mlogloss:0.12449 eval-mlogloss:0.08245 [219] train-mlogloss:0.12425 eval-mlogloss:0.08218 [220] train-mlogloss:0.12400 eval-mlogloss:0.08170 [221] train-mlogloss:0.12377 eval-mlogloss:0.08127 [222] train-mlogloss:0.12330 eval-mlogloss:0.08044 [223] train-mlogloss:0.12304 eval-mlogloss:0.08009 [224] train-mlogloss:0.12268 eval-mlogloss:0.07933 [225] train-mlogloss:0.12265 eval-mlogloss:0.07926 [226] train-mlogloss:0.12261 eval-mlogloss:0.07916 [227] train-mlogloss:0.12250 eval-mlogloss:0.07930 [228] train-mlogloss:0.12246 eval-mlogloss:0.07921 [229] train-mlogloss:0.12237 eval-mlogloss:0.07908 [230] train-mlogloss:0.12220 eval-mlogloss:0.07884 [231] train-mlogloss:0.12212 eval-mlogloss:0.07883 [232] train-mlogloss:0.12210 eval-mlogloss:0.07887 [233] train-mlogloss:0.12190 eval-mlogloss:0.07861 [234] train-mlogloss:0.12169 eval-mlogloss:0.07833 [235] train-mlogloss:0.12157 eval-mlogloss:0.07812 [236] train-mlogloss:0.12135 eval-mlogloss:0.07782 [237] train-mlogloss:0.12127 eval-mlogloss:0.07786 [238] train-mlogloss:0.12126 eval-mlogloss:0.07790 [239] train-mlogloss:0.12118 eval-mlogloss:0.07789 [240] train-mlogloss:0.12107 eval-mlogloss:0.07768 [241] train-mlogloss:0.12090 eval-mlogloss:0.07774 [242] train-mlogloss:0.12058 eval-mlogloss:0.07733 [243] train-mlogloss:0.12030 eval-mlogloss:0.07693 [244] train-mlogloss:0.12009 eval-mlogloss:0.07659 [245] train-mlogloss:0.11986 eval-mlogloss:0.07642 [246] train-mlogloss:0.11969 eval-mlogloss:0.07619 [247] train-mlogloss:0.11939 eval-mlogloss:0.07576 [248] train-mlogloss:0.11930 eval-mlogloss:0.07583 [249] train-mlogloss:0.11924 eval-mlogloss:0.07569 [250] train-mlogloss:0.11912 eval-mlogloss:0.07586 [251] train-mlogloss:0.11901 eval-mlogloss:0.07592 [252] train-mlogloss:0.11876 eval-mlogloss:0.07533 [253] train-mlogloss:0.11853 eval-mlogloss:0.07504 [254] train-mlogloss:0.11832 eval-mlogloss:0.07535 [255] train-mlogloss:0.11818 eval-mlogloss:0.07515 [256] train-mlogloss:0.11795 eval-mlogloss:0.07481 [257] train-mlogloss:0.11772 eval-mlogloss:0.07445 [258] train-mlogloss:0.11747 eval-mlogloss:0.07407 [259] train-mlogloss:0.11732 eval-mlogloss:0.07394 [260] train-mlogloss:0.11731 eval-mlogloss:0.07396 [261] train-mlogloss:0.11728 eval-mlogloss:0.07392 [262] train-mlogloss:0.11717 eval-mlogloss:0.07377 [263] train-mlogloss:0.11716 eval-mlogloss:0.07373 [264] train-mlogloss:0.11714 eval-mlogloss:0.07375 [265] train-mlogloss:0.11706 eval-mlogloss:0.07360 [266] train-mlogloss:0.11687 eval-mlogloss:0.07322 [267] train-mlogloss:0.11686 eval-mlogloss:0.07316 [268] train-mlogloss:0.11669 eval-mlogloss:0.07281 [269] train-mlogloss:0.11656 eval-mlogloss:0.07270 [270] train-mlogloss:0.11630 eval-mlogloss:0.07224 [271] train-mlogloss:0.11627 eval-mlogloss:0.07223 [272] train-mlogloss:0.11617 eval-mlogloss:0.07215 [273] train-mlogloss:0.11614 eval-mlogloss:0.07209 [274] train-mlogloss:0.11596 eval-mlogloss:0.07165 [275] train-mlogloss:0.11593 eval-mlogloss:0.07158 [276] train-mlogloss:0.11590 eval-mlogloss:0.07147 [277] train-mlogloss:0.11580 eval-mlogloss:0.07166 [278] train-mlogloss:0.11573 eval-mlogloss:0.07149 [279] train-mlogloss:0.11572 eval-mlogloss:0.07149 [280] train-mlogloss:0.11553 eval-mlogloss:0.07099 [281] train-mlogloss:0.11537 eval-mlogloss:0.07070 [282] train-mlogloss:0.11507 eval-mlogloss:0.07048 [283] train-mlogloss:0.11500 eval-mlogloss:0.07026 [284] train-mlogloss:0.11492 eval-mlogloss:0.07013 [285] train-mlogloss:0.11472 eval-mlogloss:0.06971 [286] train-mlogloss:0.11472 eval-mlogloss:0.06977 [287] train-mlogloss:0.11444 eval-mlogloss:0.06941 [288] train-mlogloss:0.11443 eval-mlogloss:0.06940 [289] train-mlogloss:0.11440 eval-mlogloss:0.06931 [290] train-mlogloss:0.11431 eval-mlogloss:0.06916 [291] train-mlogloss:0.11417 eval-mlogloss:0.06882 [292] train-mlogloss:0.11401 eval-mlogloss:0.06895 [293] train-mlogloss:0.11400 eval-mlogloss:0.06901 [294] train-mlogloss:0.11380 eval-mlogloss:0.06856 [295] train-mlogloss:0.11362 eval-mlogloss:0.06817 [296] train-mlogloss:0.11362 eval-mlogloss:0.06817 [297] train-mlogloss:0.11357 eval-mlogloss:0.06818 [298] train-mlogloss:0.11348 eval-mlogloss:0.06795 [299] train-mlogloss:0.11345 eval-mlogloss:0.06786
[I 2024-09-27 22:42:25,449] Trial 4 finished with value: 1.0 and parameters: {'lambda': 0.34483876023308846, 'alpha': 2.59196804015837e-06, 'eta': 0.028942903083954946, 'gamma': 1.888469606629467e-07, 'max_depth': 4, 'min_child_weight': 3, 'subsample': 0.7051287217263256, 'colsample_bytree': 0.46999964240861314}. Best is trial 0 with value: 1.0.
[0] train-mlogloss:0.88197 eval-mlogloss:0.86853
[I 2024-09-27 22:42:25,474] Trial 5 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.77129 eval-mlogloss:0.74652
[I 2024-09-27 22:42:25,500] Trial 6 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:1.00740 eval-mlogloss:1.00110 [1] train-mlogloss:0.92359 eval-mlogloss:0.91458
[I 2024-09-27 22:42:25,526] Trial 7 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.96998 eval-mlogloss:0.97367
[I 2024-09-27 22:42:25,555] Trial 8 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:1.03257 eval-mlogloss:1.02961 [1] train-mlogloss:0.97661 eval-mlogloss:0.97385 [2] train-mlogloss:0.91188 eval-mlogloss:0.90338 [3] train-mlogloss:0.85303 eval-mlogloss:0.83905 [4] train-mlogloss:0.80942 eval-mlogloss:0.79495
[I 2024-09-27 22:42:25,610] Trial 9 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:0.89296 eval-mlogloss:0.88028 [1] train-mlogloss:0.76636 eval-mlogloss:0.74559
[I 2024-09-27 22:42:25,727] Trial 10 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.88027 eval-mlogloss:0.86690
[I 2024-09-27 22:42:25,855] Trial 11 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.93010 eval-mlogloss:0.91975 [1] train-mlogloss:0.81447 eval-mlogloss:0.80672
[I 2024-09-27 22:42:25,992] Trial 12 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.96229 eval-mlogloss:0.95528 [1] train-mlogloss:0.85869 eval-mlogloss:0.85166
[I 2024-09-27 22:42:26,134] Trial 13 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.97922 eval-mlogloss:0.95908 [1] train-mlogloss:0.82688 eval-mlogloss:0.81113
[I 2024-09-27 22:42:26,282] Trial 14 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.94958 eval-mlogloss:0.94713
[I 2024-09-27 22:42:26,418] Trial 15 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.94340 eval-mlogloss:0.93244
[I 2024-09-27 22:42:26,552] Trial 16 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.74917 eval-mlogloss:0.71923 [1] train-mlogloss:0.54106 eval-mlogloss:0.50903
[I 2024-09-27 22:42:26,683] Trial 17 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.99011 eval-mlogloss:0.98368 [1] train-mlogloss:0.90411 eval-mlogloss:0.89406
[I 2024-09-27 22:42:26,835] Trial 18 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:1.08174 eval-mlogloss:1.08076 [1] train-mlogloss:1.06694 eval-mlogloss:1.06595 [2] train-mlogloss:1.05370 eval-mlogloss:1.05070 [3] train-mlogloss:1.03745 eval-mlogloss:1.03360 [4] train-mlogloss:1.02795 eval-mlogloss:1.02426 [5] train-mlogloss:1.01808 eval-mlogloss:1.01448 [6] train-mlogloss:1.00753 eval-mlogloss:1.00476 [7] train-mlogloss:0.99098 eval-mlogloss:0.98691 [8] train-mlogloss:0.97667 eval-mlogloss:0.97246 [9] train-mlogloss:0.97147 eval-mlogloss:0.96824 [10] train-mlogloss:0.95533 eval-mlogloss:0.95065 [11] train-mlogloss:0.94326 eval-mlogloss:0.93798 [12] train-mlogloss:0.92938 eval-mlogloss:0.92223 [13] train-mlogloss:0.91811 eval-mlogloss:0.90963 [14] train-mlogloss:0.90035 eval-mlogloss:0.89070 [15] train-mlogloss:0.89104 eval-mlogloss:0.88041 [16] train-mlogloss:0.87646 eval-mlogloss:0.86544 [17] train-mlogloss:0.86378 eval-mlogloss:0.85291 [18] train-mlogloss:0.85000 eval-mlogloss:0.83875 [19] train-mlogloss:0.83797 eval-mlogloss:0.82632 [20] train-mlogloss:0.82559 eval-mlogloss:0.81385 [21] train-mlogloss:0.81989 eval-mlogloss:0.80805 [22] train-mlogloss:0.81629 eval-mlogloss:0.80436 [23] train-mlogloss:0.80105 eval-mlogloss:0.78821 [24] train-mlogloss:0.79524 eval-mlogloss:0.78384 [25] train-mlogloss:0.78367 eval-mlogloss:0.77138 [26] train-mlogloss:0.77211 eval-mlogloss:0.75997 [27] train-mlogloss:0.76322 eval-mlogloss:0.75026 [28] train-mlogloss:0.75374 eval-mlogloss:0.73991 [29] train-mlogloss:0.74504 eval-mlogloss:0.73065 [30] train-mlogloss:0.73467 eval-mlogloss:0.72079 [31] train-mlogloss:0.72530 eval-mlogloss:0.71118 [32] train-mlogloss:0.71181 eval-mlogloss:0.69710 [33] train-mlogloss:0.70265 eval-mlogloss:0.68818 [34] train-mlogloss:0.69118 eval-mlogloss:0.67574 [35] train-mlogloss:0.68140 eval-mlogloss:0.66524 [36] train-mlogloss:0.67601 eval-mlogloss:0.65989 [37] train-mlogloss:0.67241 eval-mlogloss:0.65573 [38] train-mlogloss:0.66662 eval-mlogloss:0.64973 [39] train-mlogloss:0.66161 eval-mlogloss:0.64474 [40] train-mlogloss:0.65334 eval-mlogloss:0.63618 [41] train-mlogloss:0.64319 eval-mlogloss:0.62599 [42] train-mlogloss:0.63477 eval-mlogloss:0.61773 [43] train-mlogloss:0.63039 eval-mlogloss:0.61344 [44] train-mlogloss:0.61932 eval-mlogloss:0.60156 [45] train-mlogloss:0.61183 eval-mlogloss:0.59347 [46] train-mlogloss:0.60515 eval-mlogloss:0.58719 [47] train-mlogloss:0.60103 eval-mlogloss:0.58289 [48] train-mlogloss:0.59340 eval-mlogloss:0.57469 [49] train-mlogloss:0.58822 eval-mlogloss:0.56898 [50] train-mlogloss:0.58404 eval-mlogloss:0.56487 [51] train-mlogloss:0.57826 eval-mlogloss:0.55945 [52] train-mlogloss:0.56965 eval-mlogloss:0.54973 [53] train-mlogloss:0.56217 eval-mlogloss:0.54150 [54] train-mlogloss:0.55556 eval-mlogloss:0.53450 [55] train-mlogloss:0.55296 eval-mlogloss:0.53173 [56] train-mlogloss:0.54492 eval-mlogloss:0.52273 [57] train-mlogloss:0.53857 eval-mlogloss:0.51613 [58] train-mlogloss:0.53592 eval-mlogloss:0.51309 [59] train-mlogloss:0.52969 eval-mlogloss:0.50603 [60] train-mlogloss:0.52733 eval-mlogloss:0.50346 [61] train-mlogloss:0.52190 eval-mlogloss:0.49814 [62] train-mlogloss:0.51982 eval-mlogloss:0.49655 [63] train-mlogloss:0.51257 eval-mlogloss:0.48886 [64] train-mlogloss:0.50419 eval-mlogloss:0.47963 [65] train-mlogloss:0.50280 eval-mlogloss:0.47822 [66] train-mlogloss:0.49867 eval-mlogloss:0.47395 [67] train-mlogloss:0.49211 eval-mlogloss:0.46620 [68] train-mlogloss:0.48821 eval-mlogloss:0.46213 [69] train-mlogloss:0.48360 eval-mlogloss:0.45693 [70] train-mlogloss:0.47827 eval-mlogloss:0.45138 [71] train-mlogloss:0.47587 eval-mlogloss:0.44943 [72] train-mlogloss:0.46898 eval-mlogloss:0.44167 [73] train-mlogloss:0.46556 eval-mlogloss:0.43771 [74] train-mlogloss:0.45963 eval-mlogloss:0.43152 [75] train-mlogloss:0.45686 eval-mlogloss:0.42876 [76] train-mlogloss:0.45010 eval-mlogloss:0.42060 [77] train-mlogloss:0.44523 eval-mlogloss:0.41552 [78] train-mlogloss:0.44168 eval-mlogloss:0.41200 [79] train-mlogloss:0.43775 eval-mlogloss:0.40794 [80] train-mlogloss:0.43327 eval-mlogloss:0.40328 [81] train-mlogloss:0.42726 eval-mlogloss:0.39708 [82] train-mlogloss:0.42428 eval-mlogloss:0.39431 [83] train-mlogloss:0.42070 eval-mlogloss:0.38997 [84] train-mlogloss:0.41664 eval-mlogloss:0.38549 [85] train-mlogloss:0.41066 eval-mlogloss:0.37838 [86] train-mlogloss:0.40523 eval-mlogloss:0.37235 [87] train-mlogloss:0.40212 eval-mlogloss:0.36876 [88] train-mlogloss:0.40092 eval-mlogloss:0.36762 [89] train-mlogloss:0.39700 eval-mlogloss:0.36307 [90] train-mlogloss:0.39188 eval-mlogloss:0.35695 [91] train-mlogloss:0.38692 eval-mlogloss:0.35124 [92] train-mlogloss:0.38172 eval-mlogloss:0.34529 [93] train-mlogloss:0.37810 eval-mlogloss:0.34121 [94] train-mlogloss:0.37567 eval-mlogloss:0.33825 [95] train-mlogloss:0.37075 eval-mlogloss:0.33239 [96] train-mlogloss:0.36643 eval-mlogloss:0.32724 [97] train-mlogloss:0.36456 eval-mlogloss:0.32542 [98] train-mlogloss:0.36001 eval-mlogloss:0.32035 [99] train-mlogloss:0.35819 eval-mlogloss:0.31859 [100] train-mlogloss:0.35686 eval-mlogloss:0.31700 [101] train-mlogloss:0.35476 eval-mlogloss:0.31539 [102] train-mlogloss:0.35132 eval-mlogloss:0.31181 [103] train-mlogloss:0.34904 eval-mlogloss:0.30953 [104] train-mlogloss:0.34634 eval-mlogloss:0.30632 [105] train-mlogloss:0.34410 eval-mlogloss:0.30406 [106] train-mlogloss:0.34108 eval-mlogloss:0.30043 [107] train-mlogloss:0.33877 eval-mlogloss:0.29748 [108] train-mlogloss:0.33597 eval-mlogloss:0.29424 [109] train-mlogloss:0.33440 eval-mlogloss:0.29233 [110] train-mlogloss:0.33059 eval-mlogloss:0.28796 [111] train-mlogloss:0.32662 eval-mlogloss:0.28375 [112] train-mlogloss:0.32515 eval-mlogloss:0.28241 [113] train-mlogloss:0.32256 eval-mlogloss:0.27955 [114] train-mlogloss:0.32070 eval-mlogloss:0.27720 [115] train-mlogloss:0.31832 eval-mlogloss:0.27459 [116] train-mlogloss:0.31611 eval-mlogloss:0.27195 [117] train-mlogloss:0.31442 eval-mlogloss:0.27039 [118] train-mlogloss:0.31264 eval-mlogloss:0.26827 [119] train-mlogloss:0.31061 eval-mlogloss:0.26609 [120] train-mlogloss:0.30842 eval-mlogloss:0.26365 [121] train-mlogloss:0.30753 eval-mlogloss:0.26276 [122] train-mlogloss:0.30567 eval-mlogloss:0.26055 [123] train-mlogloss:0.30335 eval-mlogloss:0.25792 [124] train-mlogloss:0.30181 eval-mlogloss:0.25626 [125] train-mlogloss:0.29866 eval-mlogloss:0.25289 [126] train-mlogloss:0.29662 eval-mlogloss:0.25022 [127] train-mlogloss:0.29410 eval-mlogloss:0.24735 [128] train-mlogloss:0.29333 eval-mlogloss:0.24682 [129] train-mlogloss:0.29114 eval-mlogloss:0.24415 [130] train-mlogloss:0.29025 eval-mlogloss:0.24299 [131] train-mlogloss:0.28779 eval-mlogloss:0.24008 [132] train-mlogloss:0.28665 eval-mlogloss:0.23836 [133] train-mlogloss:0.28525 eval-mlogloss:0.23706 [134] train-mlogloss:0.28468 eval-mlogloss:0.23688 [135] train-mlogloss:0.28274 eval-mlogloss:0.23436 [136] train-mlogloss:0.28049 eval-mlogloss:0.23196 [137] train-mlogloss:0.28000 eval-mlogloss:0.23174 [138] train-mlogloss:0.27837 eval-mlogloss:0.22971 [139] train-mlogloss:0.27709 eval-mlogloss:0.22819 [140] train-mlogloss:0.27614 eval-mlogloss:0.22753 [141] train-mlogloss:0.27503 eval-mlogloss:0.22636 [142] train-mlogloss:0.27241 eval-mlogloss:0.22321 [143] train-mlogloss:0.27099 eval-mlogloss:0.22185 [144] train-mlogloss:0.27058 eval-mlogloss:0.22136 [145] train-mlogloss:0.27048 eval-mlogloss:0.22129 [146] train-mlogloss:0.26859 eval-mlogloss:0.21919 [147] train-mlogloss:0.26756 eval-mlogloss:0.21793 [148] train-mlogloss:0.26643 eval-mlogloss:0.21653 [149] train-mlogloss:0.26418 eval-mlogloss:0.21400 [150] train-mlogloss:0.26279 eval-mlogloss:0.21255 [151] train-mlogloss:0.26110 eval-mlogloss:0.21079 [152] train-mlogloss:0.26011 eval-mlogloss:0.20985 [153] train-mlogloss:0.25998 eval-mlogloss:0.20939 [154] train-mlogloss:0.25912 eval-mlogloss:0.20849 [155] train-mlogloss:0.25827 eval-mlogloss:0.20768 [156] train-mlogloss:0.25752 eval-mlogloss:0.20689 [157] train-mlogloss:0.25645 eval-mlogloss:0.20585 [158] train-mlogloss:0.25466 eval-mlogloss:0.20372 [159] train-mlogloss:0.25334 eval-mlogloss:0.20229 [160] train-mlogloss:0.25236 eval-mlogloss:0.20128 [161] train-mlogloss:0.25074 eval-mlogloss:0.19924 [162] train-mlogloss:0.25048 eval-mlogloss:0.19899 [163] train-mlogloss:0.24957 eval-mlogloss:0.19796 [164] train-mlogloss:0.24888 eval-mlogloss:0.19727 [165] train-mlogloss:0.24771 eval-mlogloss:0.19588 [166] train-mlogloss:0.24636 eval-mlogloss:0.19410 [167] train-mlogloss:0.24529 eval-mlogloss:0.19292 [168] train-mlogloss:0.24519 eval-mlogloss:0.19285 [169] train-mlogloss:0.24344 eval-mlogloss:0.19094 [170] train-mlogloss:0.24210 eval-mlogloss:0.18927 [171] train-mlogloss:0.24109 eval-mlogloss:0.18813 [172] train-mlogloss:0.24100 eval-mlogloss:0.18798 [173] train-mlogloss:0.24089 eval-mlogloss:0.18775 [174] train-mlogloss:0.23999 eval-mlogloss:0.18663 [175] train-mlogloss:0.23983 eval-mlogloss:0.18654 [176] train-mlogloss:0.23961 eval-mlogloss:0.18639 [177] train-mlogloss:0.23941 eval-mlogloss:0.18634 [178] train-mlogloss:0.23906 eval-mlogloss:0.18600 [179] train-mlogloss:0.23796 eval-mlogloss:0.18469 [180] train-mlogloss:0.23754 eval-mlogloss:0.18431 [181] train-mlogloss:0.23689 eval-mlogloss:0.18361 [182] train-mlogloss:0.23616 eval-mlogloss:0.18275 [183] train-mlogloss:0.23604 eval-mlogloss:0.18272 [184] train-mlogloss:0.23583 eval-mlogloss:0.18238 [185] train-mlogloss:0.23499 eval-mlogloss:0.18145 [186] train-mlogloss:0.23441 eval-mlogloss:0.18079 [187] train-mlogloss:0.23390 eval-mlogloss:0.18022 [188] train-mlogloss:0.23353 eval-mlogloss:0.17982 [189] train-mlogloss:0.23340 eval-mlogloss:0.17972 [190] train-mlogloss:0.23313 eval-mlogloss:0.17942 [191] train-mlogloss:0.23291 eval-mlogloss:0.17907 [192] train-mlogloss:0.23220 eval-mlogloss:0.17816 [193] train-mlogloss:0.23212 eval-mlogloss:0.17826 [194] train-mlogloss:0.23196 eval-mlogloss:0.17815 [195] train-mlogloss:0.23178 eval-mlogloss:0.17792 [196] train-mlogloss:0.23097 eval-mlogloss:0.17688 [197] train-mlogloss:0.23063 eval-mlogloss:0.17636 [198] train-mlogloss:0.22987 eval-mlogloss:0.17554 [199] train-mlogloss:0.22916 eval-mlogloss:0.17460 [200] train-mlogloss:0.22843 eval-mlogloss:0.17370 [201] train-mlogloss:0.22697 eval-mlogloss:0.17187 [202] train-mlogloss:0.22633 eval-mlogloss:0.17106 [203] train-mlogloss:0.22573 eval-mlogloss:0.17034 [204] train-mlogloss:0.22563 eval-mlogloss:0.17025 [205] train-mlogloss:0.22562 eval-mlogloss:0.17029 [206] train-mlogloss:0.22552 eval-mlogloss:0.17013 [207] train-mlogloss:0.22544 eval-mlogloss:0.17016 [208] train-mlogloss:0.22541 eval-mlogloss:0.17013 [209] train-mlogloss:0.22535 eval-mlogloss:0.17019 [210] train-mlogloss:0.22464 eval-mlogloss:0.16937 [211] train-mlogloss:0.22457 eval-mlogloss:0.16940 [212] train-mlogloss:0.22447 eval-mlogloss:0.16948 [213] train-mlogloss:0.22348 eval-mlogloss:0.16839 [214] train-mlogloss:0.22287 eval-mlogloss:0.16752 [215] train-mlogloss:0.22279 eval-mlogloss:0.16736 [216] train-mlogloss:0.22275 eval-mlogloss:0.16733 [217] train-mlogloss:0.22221 eval-mlogloss:0.16657 [218] train-mlogloss:0.22164 eval-mlogloss:0.16586 [219] train-mlogloss:0.22143 eval-mlogloss:0.16576 [220] train-mlogloss:0.22104 eval-mlogloss:0.16528 [221] train-mlogloss:0.22058 eval-mlogloss:0.16457 [222] train-mlogloss:0.21997 eval-mlogloss:0.16378 [223] train-mlogloss:0.21943 eval-mlogloss:0.16317 [224] train-mlogloss:0.21881 eval-mlogloss:0.16239 [225] train-mlogloss:0.21872 eval-mlogloss:0.16223 [226] train-mlogloss:0.21871 eval-mlogloss:0.16224 [227] train-mlogloss:0.21862 eval-mlogloss:0.16236 [228] train-mlogloss:0.21864 eval-mlogloss:0.16237 [229] train-mlogloss:0.21851 eval-mlogloss:0.16214 [230] train-mlogloss:0.21802 eval-mlogloss:0.16163 [231] train-mlogloss:0.21797 eval-mlogloss:0.16157 [232] train-mlogloss:0.21759 eval-mlogloss:0.16106 [233] train-mlogloss:0.21751 eval-mlogloss:0.16101 [234] train-mlogloss:0.21733 eval-mlogloss:0.16077 [235] train-mlogloss:0.21733 eval-mlogloss:0.16079 [236] train-mlogloss:0.21689 eval-mlogloss:0.16016 [237] train-mlogloss:0.21687 eval-mlogloss:0.16018 [238] train-mlogloss:0.21684 eval-mlogloss:0.16019 [239] train-mlogloss:0.21685 eval-mlogloss:0.16029 [240] train-mlogloss:0.21633 eval-mlogloss:0.15956 [241] train-mlogloss:0.21628 eval-mlogloss:0.15969 [242] train-mlogloss:0.21574 eval-mlogloss:0.15904 [243] train-mlogloss:0.21521 eval-mlogloss:0.15825 [244] train-mlogloss:0.21473 eval-mlogloss:0.15766 [245] train-mlogloss:0.21458 eval-mlogloss:0.15766 [246] train-mlogloss:0.21447 eval-mlogloss:0.15772 [247] train-mlogloss:0.21411 eval-mlogloss:0.15732 [248] train-mlogloss:0.21401 eval-mlogloss:0.15722 [249] train-mlogloss:0.21391 eval-mlogloss:0.15706 [250] train-mlogloss:0.21389 eval-mlogloss:0.15687 [251] train-mlogloss:0.21387 eval-mlogloss:0.15689 [252] train-mlogloss:0.21330 eval-mlogloss:0.15622 [253] train-mlogloss:0.21287 eval-mlogloss:0.15559 [254] train-mlogloss:0.21276 eval-mlogloss:0.15555 [255] train-mlogloss:0.21274 eval-mlogloss:0.15544 [256] train-mlogloss:0.21261 eval-mlogloss:0.15533 [257] train-mlogloss:0.21258 eval-mlogloss:0.15518 [258] train-mlogloss:0.21243 eval-mlogloss:0.15502 [259] train-mlogloss:0.21203 eval-mlogloss:0.15446 [260] train-mlogloss:0.21193 eval-mlogloss:0.15428 [261] train-mlogloss:0.21182 eval-mlogloss:0.15414 [262] train-mlogloss:0.21173 eval-mlogloss:0.15404 [263] train-mlogloss:0.21168 eval-mlogloss:0.15394 [264] train-mlogloss:0.21158 eval-mlogloss:0.15373 [265] train-mlogloss:0.21143 eval-mlogloss:0.15345 [266] train-mlogloss:0.21135 eval-mlogloss:0.15337 [267] train-mlogloss:0.21134 eval-mlogloss:0.15329 [268] train-mlogloss:0.21126 eval-mlogloss:0.15319 [269] train-mlogloss:0.21123 eval-mlogloss:0.15311 [270] train-mlogloss:0.21118 eval-mlogloss:0.15303 [271] train-mlogloss:0.21119 eval-mlogloss:0.15304 [272] train-mlogloss:0.21118 eval-mlogloss:0.15306 [273] train-mlogloss:0.21117 eval-mlogloss:0.15309 [274] train-mlogloss:0.21117 eval-mlogloss:0.15307 [275] train-mlogloss:0.21119 eval-mlogloss:0.15309 [276] train-mlogloss:0.21118 eval-mlogloss:0.15306 [277] train-mlogloss:0.21112 eval-mlogloss:0.15299 [278] train-mlogloss:0.21073 eval-mlogloss:0.15247 [279] train-mlogloss:0.21060 eval-mlogloss:0.15228 [280] train-mlogloss:0.21047 eval-mlogloss:0.15213 [281] train-mlogloss:0.21046 eval-mlogloss:0.15214 [282] train-mlogloss:0.21038 eval-mlogloss:0.15208 [283] train-mlogloss:0.21025 eval-mlogloss:0.15196 [284] train-mlogloss:0.21012 eval-mlogloss:0.15183 [285] train-mlogloss:0.21009 eval-mlogloss:0.15167 [286] train-mlogloss:0.21002 eval-mlogloss:0.15158 [287] train-mlogloss:0.20997 eval-mlogloss:0.15154 [288] train-mlogloss:0.20997 eval-mlogloss:0.15159 [289] train-mlogloss:0.20988 eval-mlogloss:0.15146 [290] train-mlogloss:0.20980 eval-mlogloss:0.15137 [291] train-mlogloss:0.20969 eval-mlogloss:0.15117 [292] train-mlogloss:0.20946 eval-mlogloss:0.15118 [293] train-mlogloss:0.20946 eval-mlogloss:0.15121 [294] train-mlogloss:0.20944 eval-mlogloss:0.15128 [295] train-mlogloss:0.20940 eval-mlogloss:0.15126 [296] train-mlogloss:0.20938 eval-mlogloss:0.15119 [297] train-mlogloss:0.20936 eval-mlogloss:0.15115 [298] train-mlogloss:0.20876 eval-mlogloss:0.15023 [299] train-mlogloss:0.20868 eval-mlogloss:0.15014
[I 2024-09-27 22:42:29,052] Trial 19 finished with value: 1.0 and parameters: {'lambda': 0.0001089250310099782, 'alpha': 1.3127706074061831e-05, 'eta': 0.018370439537168826, 'gamma': 0.005384139663017008, 'max_depth': 6, 'min_child_weight': 6, 'subsample': 0.6162611618924605, 'colsample_bytree': 0.46890471015776325}. Best is trial 0 with value: 1.0.
[0] train-mlogloss:0.80427 eval-mlogloss:0.77884
[I 2024-09-27 22:42:29,185] Trial 20 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.93606 eval-mlogloss:0.92450
[I 2024-09-27 22:42:29,305] Trial 21 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.94161 eval-mlogloss:0.93216
[I 2024-09-27 22:42:29,427] Trial 22 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.90920 eval-mlogloss:0.89461
[I 2024-09-27 22:42:29,555] Trial 23 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.96102 eval-mlogloss:0.95119
[I 2024-09-27 22:42:29,682] Trial 24 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.96762 eval-mlogloss:0.95841 [1] train-mlogloss:0.87376 eval-mlogloss:0.86604
[I 2024-09-27 22:42:29,818] Trial 25 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:1.01799 eval-mlogloss:1.01423 [1] train-mlogloss:0.95111 eval-mlogloss:0.94923 [2] train-mlogloss:0.87189 eval-mlogloss:0.86179 [3] train-mlogloss:0.80279 eval-mlogloss:0.78733
[I 2024-09-27 22:42:29,991] Trial 26 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:0.89330 eval-mlogloss:0.87703 [1] train-mlogloss:0.75268 eval-mlogloss:0.73963
[I 2024-09-27 22:42:30,138] Trial 27 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.88207 eval-mlogloss:0.87098
[I 2024-09-27 22:42:30,284] Trial 28 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.86731 eval-mlogloss:0.86015 [1] train-mlogloss:0.71489 eval-mlogloss:0.70723
[I 2024-09-27 22:42:30,441] Trial 29 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.97513 eval-mlogloss:0.96706
[I 2024-09-27 22:42:30,589] Trial 30 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:1.08426 eval-mlogloss:1.08443 [1] train-mlogloss:1.07142 eval-mlogloss:1.07293 [2] train-mlogloss:1.06038 eval-mlogloss:1.06124 [3] train-mlogloss:1.04648 eval-mlogloss:1.04736 [4] train-mlogloss:1.03773 eval-mlogloss:1.04059 [5] train-mlogloss:1.02884 eval-mlogloss:1.03257 [6] train-mlogloss:1.01963 eval-mlogloss:1.02434 [7] train-mlogloss:1.00584 eval-mlogloss:1.00927 [8] train-mlogloss:0.99303 eval-mlogloss:0.99658 [9] train-mlogloss:0.98817 eval-mlogloss:0.99363 [10] train-mlogloss:0.97384 eval-mlogloss:0.98020 [11] train-mlogloss:0.96321 eval-mlogloss:0.97080 [12] train-mlogloss:0.95054 eval-mlogloss:0.95743 [13] train-mlogloss:0.94021 eval-mlogloss:0.94638 [14] train-mlogloss:0.92478 eval-mlogloss:0.93026 [15] train-mlogloss:0.91617 eval-mlogloss:0.92173 [16] train-mlogloss:0.90312 eval-mlogloss:0.90832 [17] train-mlogloss:0.89127 eval-mlogloss:0.89657 [18] train-mlogloss:0.87913 eval-mlogloss:0.88467 [19] train-mlogloss:0.86850 eval-mlogloss:0.87383 [20] train-mlogloss:0.85762 eval-mlogloss:0.86293 [21] train-mlogloss:0.85179 eval-mlogloss:0.85804 [22] train-mlogloss:0.84819 eval-mlogloss:0.85501 [23] train-mlogloss:0.83480 eval-mlogloss:0.84058 [24] train-mlogloss:0.82943 eval-mlogloss:0.83642 [25] train-mlogloss:0.81927 eval-mlogloss:0.82553 [26] train-mlogloss:0.80828 eval-mlogloss:0.81478 [27] train-mlogloss:0.79965 eval-mlogloss:0.80584 [28] train-mlogloss:0.79050 eval-mlogloss:0.79697 [29] train-mlogloss:0.78260 eval-mlogloss:0.78889 [30] train-mlogloss:0.77322 eval-mlogloss:0.77980 [31] train-mlogloss:0.76468 eval-mlogloss:0.77124 [32] train-mlogloss:0.75254 eval-mlogloss:0.75821 [33] train-mlogloss:0.74433 eval-mlogloss:0.75000 [34] train-mlogloss:0.73416 eval-mlogloss:0.73899 [35] train-mlogloss:0.72522 eval-mlogloss:0.72975 [36] train-mlogloss:0.71997 eval-mlogloss:0.72487 [37] train-mlogloss:0.71587 eval-mlogloss:0.72150 [38] train-mlogloss:0.71038 eval-mlogloss:0.71633 [39] train-mlogloss:0.70534 eval-mlogloss:0.71190 [40] train-mlogloss:0.69772 eval-mlogloss:0.70426 [41] train-mlogloss:0.68842 eval-mlogloss:0.69438 [42] train-mlogloss:0.68069 eval-mlogloss:0.68696 [43] train-mlogloss:0.67598 eval-mlogloss:0.68286 [44] train-mlogloss:0.66594 eval-mlogloss:0.67204 [45] train-mlogloss:0.65874 eval-mlogloss:0.66465 [46] train-mlogloss:0.65238 eval-mlogloss:0.65832 [47] train-mlogloss:0.64792 eval-mlogloss:0.65466 [48] train-mlogloss:0.64057 eval-mlogloss:0.64677 [49] train-mlogloss:0.63526 eval-mlogloss:0.64116 [50] train-mlogloss:0.63113 eval-mlogloss:0.63745 [51] train-mlogloss:0.62549 eval-mlogloss:0.63207 [52] train-mlogloss:0.61743 eval-mlogloss:0.62314 [53] train-mlogloss:0.61007 eval-mlogloss:0.61553 [54] train-mlogloss:0.60393 eval-mlogloss:0.60952 [55] train-mlogloss:0.60042 eval-mlogloss:0.60705 [56] train-mlogloss:0.59296 eval-mlogloss:0.59852 [57] train-mlogloss:0.58672 eval-mlogloss:0.59251 [58] train-mlogloss:0.58350 eval-mlogloss:0.58955 [59] train-mlogloss:0.57757 eval-mlogloss:0.58285 [60] train-mlogloss:0.57467 eval-mlogloss:0.58047 [61] train-mlogloss:0.56899 eval-mlogloss:0.57489 [62] train-mlogloss:0.56638 eval-mlogloss:0.57251 [63] train-mlogloss:0.55932 eval-mlogloss:0.56490 [64] train-mlogloss:0.55145 eval-mlogloss:0.55605 [65] train-mlogloss:0.54937 eval-mlogloss:0.55442 [66] train-mlogloss:0.54484 eval-mlogloss:0.54995 [67] train-mlogloss:0.53826 eval-mlogloss:0.54204 [68] train-mlogloss:0.53402 eval-mlogloss:0.53859 [69] train-mlogloss:0.52912 eval-mlogloss:0.53352 [70] train-mlogloss:0.52412 eval-mlogloss:0.52831 [71] train-mlogloss:0.52132 eval-mlogloss:0.52616 [72] train-mlogloss:0.51507 eval-mlogloss:0.51895 [73] train-mlogloss:0.51133 eval-mlogloss:0.51525 [74] train-mlogloss:0.50536 eval-mlogloss:0.50844 [75] train-mlogloss:0.50247 eval-mlogloss:0.50590 [76] train-mlogloss:0.49560 eval-mlogloss:0.49838 [77] train-mlogloss:0.49027 eval-mlogloss:0.49291 [78] train-mlogloss:0.48640 eval-mlogloss:0.48945 [79] train-mlogloss:0.48245 eval-mlogloss:0.48571 [80] train-mlogloss:0.47786 eval-mlogloss:0.48106 [81] train-mlogloss:0.47166 eval-mlogloss:0.47443 [82] train-mlogloss:0.46827 eval-mlogloss:0.47105 [83] train-mlogloss:0.46447 eval-mlogloss:0.46675 [84] train-mlogloss:0.45973 eval-mlogloss:0.46182 [85] train-mlogloss:0.45385 eval-mlogloss:0.45505 [86] train-mlogloss:0.44864 eval-mlogloss:0.44915 [87] train-mlogloss:0.44512 eval-mlogloss:0.44546 [88] train-mlogloss:0.44334 eval-mlogloss:0.44385 [89] train-mlogloss:0.43842 eval-mlogloss:0.43837 [90] train-mlogloss:0.43280 eval-mlogloss:0.43180 [91] train-mlogloss:0.42726 eval-mlogloss:0.42554 [92] train-mlogloss:0.42189 eval-mlogloss:0.41951 [93] train-mlogloss:0.41797 eval-mlogloss:0.41525 [94] train-mlogloss:0.41511 eval-mlogloss:0.41231 [95] train-mlogloss:0.40998 eval-mlogloss:0.40639 [96] train-mlogloss:0.40545 eval-mlogloss:0.40122 [97] train-mlogloss:0.40332 eval-mlogloss:0.39930 [98] train-mlogloss:0.39825 eval-mlogloss:0.39365 [99] train-mlogloss:0.39622 eval-mlogloss:0.39184 [100] train-mlogloss:0.39420 eval-mlogloss:0.38988 [101] train-mlogloss:0.39176 eval-mlogloss:0.38767 [102] train-mlogloss:0.38819 eval-mlogloss:0.38387 [103] train-mlogloss:0.38572 eval-mlogloss:0.38111 [104] train-mlogloss:0.38257 eval-mlogloss:0.37781 [105] train-mlogloss:0.38013 eval-mlogloss:0.37557 [106] train-mlogloss:0.37631 eval-mlogloss:0.37150 [107] train-mlogloss:0.37325 eval-mlogloss:0.36834 [108] train-mlogloss:0.37009 eval-mlogloss:0.36517 [109] train-mlogloss:0.36785 eval-mlogloss:0.36322 [110] train-mlogloss:0.36355 eval-mlogloss:0.35818 [111] train-mlogloss:0.35931 eval-mlogloss:0.35344 [112] train-mlogloss:0.35756 eval-mlogloss:0.35209 [113] train-mlogloss:0.35408 eval-mlogloss:0.34814 [114] train-mlogloss:0.35122 eval-mlogloss:0.34538 [115] train-mlogloss:0.34723 eval-mlogloss:0.34071 [116] train-mlogloss:0.34470 eval-mlogloss:0.33820 [117] train-mlogloss:0.34250 eval-mlogloss:0.33640 [118] train-mlogloss:0.33984 eval-mlogloss:0.33369 [119] train-mlogloss:0.33640 eval-mlogloss:0.32954 [120] train-mlogloss:0.33333 eval-mlogloss:0.32584 [121] train-mlogloss:0.33142 eval-mlogloss:0.32386 [122] train-mlogloss:0.32911 eval-mlogloss:0.32159 [123] train-mlogloss:0.32642 eval-mlogloss:0.31868 [124] train-mlogloss:0.32445 eval-mlogloss:0.31733 [125] train-mlogloss:0.32065 eval-mlogloss:0.31323 [126] train-mlogloss:0.31787 eval-mlogloss:0.31025 [127] train-mlogloss:0.31534 eval-mlogloss:0.30724 [128] train-mlogloss:0.31359 eval-mlogloss:0.30579 [129] train-mlogloss:0.31107 eval-mlogloss:0.30295 [130] train-mlogloss:0.30982 eval-mlogloss:0.30192 [131] train-mlogloss:0.30680 eval-mlogloss:0.29843 [132] train-mlogloss:0.30500 eval-mlogloss:0.29665 [133] train-mlogloss:0.30267 eval-mlogloss:0.29445 [134] train-mlogloss:0.30177 eval-mlogloss:0.29407 [135] train-mlogloss:0.29886 eval-mlogloss:0.29065 [136] train-mlogloss:0.29562 eval-mlogloss:0.28667 [137] train-mlogloss:0.29491 eval-mlogloss:0.28640 [138] train-mlogloss:0.29162 eval-mlogloss:0.28239 [139] train-mlogloss:0.28920 eval-mlogloss:0.27956 [140] train-mlogloss:0.28775 eval-mlogloss:0.27825 [141] train-mlogloss:0.28498 eval-mlogloss:0.27475 [142] train-mlogloss:0.28193 eval-mlogloss:0.27111 [143] train-mlogloss:0.28030 eval-mlogloss:0.26960 [144] train-mlogloss:0.27793 eval-mlogloss:0.26710 [145] train-mlogloss:0.27668 eval-mlogloss:0.26621 [146] train-mlogloss:0.27465 eval-mlogloss:0.26421 [147] train-mlogloss:0.27322 eval-mlogloss:0.26318 [148] train-mlogloss:0.27114 eval-mlogloss:0.26063 [149] train-mlogloss:0.26903 eval-mlogloss:0.25832 [150] train-mlogloss:0.26669 eval-mlogloss:0.25560 [151] train-mlogloss:0.26481 eval-mlogloss:0.25377 [152] train-mlogloss:0.26282 eval-mlogloss:0.25189 [153] train-mlogloss:0.26165 eval-mlogloss:0.25072 [154] train-mlogloss:0.25903 eval-mlogloss:0.24773 [155] train-mlogloss:0.25792 eval-mlogloss:0.24712 [156] train-mlogloss:0.25618 eval-mlogloss:0.24520 [157] train-mlogloss:0.25482 eval-mlogloss:0.24402 [158] train-mlogloss:0.25223 eval-mlogloss:0.24065 [159] train-mlogloss:0.25047 eval-mlogloss:0.23904 [160] train-mlogloss:0.24797 eval-mlogloss:0.23628 [161] train-mlogloss:0.24632 eval-mlogloss:0.23431 [162] train-mlogloss:0.24476 eval-mlogloss:0.23277 [163] train-mlogloss:0.24348 eval-mlogloss:0.23152 [164] train-mlogloss:0.24200 eval-mlogloss:0.22997 [165] train-mlogloss:0.23988 eval-mlogloss:0.22720 [166] train-mlogloss:0.23843 eval-mlogloss:0.22541 [167] train-mlogloss:0.23724 eval-mlogloss:0.22406 [168] train-mlogloss:0.23567 eval-mlogloss:0.22234 [169] train-mlogloss:0.23404 eval-mlogloss:0.22073 [170] train-mlogloss:0.23212 eval-mlogloss:0.21841 [171] train-mlogloss:0.22989 eval-mlogloss:0.21574 [172] train-mlogloss:0.22893 eval-mlogloss:0.21488 [173] train-mlogloss:0.22820 eval-mlogloss:0.21435 [174] train-mlogloss:0.22696 eval-mlogloss:0.21302 [175] train-mlogloss:0.22602 eval-mlogloss:0.21261 [176] train-mlogloss:0.22489 eval-mlogloss:0.21151 [177] train-mlogloss:0.22335 eval-mlogloss:0.21003 [178] train-mlogloss:0.22176 eval-mlogloss:0.20831 [179] train-mlogloss:0.21988 eval-mlogloss:0.20583 [180] train-mlogloss:0.21886 eval-mlogloss:0.20504 [181] train-mlogloss:0.21684 eval-mlogloss:0.20267 [182] train-mlogloss:0.21541 eval-mlogloss:0.20109 [183] train-mlogloss:0.21448 eval-mlogloss:0.20016 [184] train-mlogloss:0.21380 eval-mlogloss:0.19960 [185] train-mlogloss:0.21230 eval-mlogloss:0.19814 [186] train-mlogloss:0.21123 eval-mlogloss:0.19720 [187] train-mlogloss:0.20926 eval-mlogloss:0.19479 [188] train-mlogloss:0.20796 eval-mlogloss:0.19338 [189] train-mlogloss:0.20706 eval-mlogloss:0.19249 [190] train-mlogloss:0.20602 eval-mlogloss:0.19159 [191] train-mlogloss:0.20524 eval-mlogloss:0.19070 [192] train-mlogloss:0.20346 eval-mlogloss:0.18844 [193] train-mlogloss:0.20252 eval-mlogloss:0.18740 [194] train-mlogloss:0.20116 eval-mlogloss:0.18611 [195] train-mlogloss:0.20039 eval-mlogloss:0.18538 [196] train-mlogloss:0.19859 eval-mlogloss:0.18318 [197] train-mlogloss:0.19771 eval-mlogloss:0.18235 [198] train-mlogloss:0.19631 eval-mlogloss:0.18074 [199] train-mlogloss:0.19542 eval-mlogloss:0.17967 [200] train-mlogloss:0.19414 eval-mlogloss:0.17787 [201] train-mlogloss:0.19294 eval-mlogloss:0.17631 [202] train-mlogloss:0.19214 eval-mlogloss:0.17540 [203] train-mlogloss:0.19060 eval-mlogloss:0.17324 [204] train-mlogloss:0.18943 eval-mlogloss:0.17186 [205] train-mlogloss:0.18795 eval-mlogloss:0.16979 [206] train-mlogloss:0.18685 eval-mlogloss:0.16872 [207] train-mlogloss:0.18651 eval-mlogloss:0.16871 [208] train-mlogloss:0.18542 eval-mlogloss:0.16747 [209] train-mlogloss:0.18472 eval-mlogloss:0.16654 [210] train-mlogloss:0.18386 eval-mlogloss:0.16555 [211] train-mlogloss:0.18300 eval-mlogloss:0.16451 [212] train-mlogloss:0.18260 eval-mlogloss:0.16425 [213] train-mlogloss:0.18164 eval-mlogloss:0.16312 [214] train-mlogloss:0.18082 eval-mlogloss:0.16214 [215] train-mlogloss:0.18021 eval-mlogloss:0.16148 [216] train-mlogloss:0.17948 eval-mlogloss:0.16080 [217] train-mlogloss:0.17810 eval-mlogloss:0.15920 [218] train-mlogloss:0.17704 eval-mlogloss:0.15783 [219] train-mlogloss:0.17627 eval-mlogloss:0.15694 [220] train-mlogloss:0.17527 eval-mlogloss:0.15591 [221] train-mlogloss:0.17414 eval-mlogloss:0.15472 [222] train-mlogloss:0.17274 eval-mlogloss:0.15301 [223] train-mlogloss:0.17188 eval-mlogloss:0.15212 [224] train-mlogloss:0.17055 eval-mlogloss:0.15052 [225] train-mlogloss:0.17031 eval-mlogloss:0.15030 [226] train-mlogloss:0.16963 eval-mlogloss:0.14927 [227] train-mlogloss:0.16914 eval-mlogloss:0.14893 [228] train-mlogloss:0.16868 eval-mlogloss:0.14849 [229] train-mlogloss:0.16807 eval-mlogloss:0.14806 [230] train-mlogloss:0.16691 eval-mlogloss:0.14674 [231] train-mlogloss:0.16620 eval-mlogloss:0.14585 [232] train-mlogloss:0.16592 eval-mlogloss:0.14576 [233] train-mlogloss:0.16506 eval-mlogloss:0.14502 [234] train-mlogloss:0.16415 eval-mlogloss:0.14414 [235] train-mlogloss:0.16301 eval-mlogloss:0.14252 [236] train-mlogloss:0.16222 eval-mlogloss:0.14175 [237] train-mlogloss:0.16170 eval-mlogloss:0.14097 [238] train-mlogloss:0.16104 eval-mlogloss:0.14017 [239] train-mlogloss:0.16076 eval-mlogloss:0.14015 [240] train-mlogloss:0.16000 eval-mlogloss:0.13949 [241] train-mlogloss:0.15982 eval-mlogloss:0.13950 [242] train-mlogloss:0.15914 eval-mlogloss:0.13886 [243] train-mlogloss:0.15789 eval-mlogloss:0.13739 [244] train-mlogloss:0.15705 eval-mlogloss:0.13616 [245] train-mlogloss:0.15646 eval-mlogloss:0.13554 [246] train-mlogloss:0.15598 eval-mlogloss:0.13497 [247] train-mlogloss:0.15519 eval-mlogloss:0.13402 [248] train-mlogloss:0.15475 eval-mlogloss:0.13368 [249] train-mlogloss:0.15434 eval-mlogloss:0.13325 [250] train-mlogloss:0.15406 eval-mlogloss:0.13305 [251] train-mlogloss:0.15355 eval-mlogloss:0.13240 [252] train-mlogloss:0.15245 eval-mlogloss:0.13076 [253] train-mlogloss:0.15169 eval-mlogloss:0.13004 [254] train-mlogloss:0.15122 eval-mlogloss:0.12988 [255] train-mlogloss:0.15075 eval-mlogloss:0.12933
[I 2024-09-27 22:42:32,786] Trial 31 pruned. Trial was pruned at iteration 256.
[0] train-mlogloss:1.06203 eval-mlogloss:1.06166 [1] train-mlogloss:1.03101 eval-mlogloss:1.03170 [2] train-mlogloss:1.00472 eval-mlogloss:1.00374 [3] train-mlogloss:0.97236 eval-mlogloss:0.97017 [4] train-mlogloss:0.95321 eval-mlogloss:0.95396
[I 2024-09-27 22:42:32,944] Trial 32 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:1.02168 eval-mlogloss:1.02389 [1] train-mlogloss:0.96058 eval-mlogloss:0.96882 [2] train-mlogloss:0.91071 eval-mlogloss:0.91790 [3] train-mlogloss:0.85204 eval-mlogloss:0.86000
[I 2024-09-27 22:42:33,097] Trial 33 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:1.01680 eval-mlogloss:1.01750 [1] train-mlogloss:0.95189 eval-mlogloss:0.95749 [2] train-mlogloss:0.89912 eval-mlogloss:0.90314 [3] train-mlogloss:0.83848 eval-mlogloss:0.83995
[I 2024-09-27 22:42:33,276] Trial 34 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:1.04916 eval-mlogloss:1.05092 [1] train-mlogloss:1.00448 eval-mlogloss:1.00938 [2] train-mlogloss:0.95334 eval-mlogloss:0.95288 [3] train-mlogloss:0.90631 eval-mlogloss:0.90295 [4] train-mlogloss:0.86912 eval-mlogloss:0.86796
[I 2024-09-27 22:42:33,434] Trial 35 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:0.94143 eval-mlogloss:0.94802 [1] train-mlogloss:0.81770 eval-mlogloss:0.83105
[I 2024-09-27 22:42:33,577] Trial 36 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.97185 eval-mlogloss:0.96876
[I 2024-09-27 22:42:33,711] Trial 37 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.92179 eval-mlogloss:0.91932 [1] train-mlogloss:0.78756 eval-mlogloss:0.79377
[I 2024-09-27 22:42:33,860] Trial 38 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.93226 eval-mlogloss:0.92997
[I 2024-09-27 22:42:34,008] Trial 39 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:1.04704 eval-mlogloss:1.04681 [1] train-mlogloss:1.00181 eval-mlogloss:0.99951 [2] train-mlogloss:0.94891 eval-mlogloss:0.94294 [3] train-mlogloss:0.90023 eval-mlogloss:0.88967
[I 2024-09-27 22:42:34,161] Trial 40 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:1.08458 eval-mlogloss:1.08381 [1] train-mlogloss:1.07217 eval-mlogloss:1.07139 [2] train-mlogloss:1.06104 eval-mlogloss:1.05852 [3] train-mlogloss:1.04728 eval-mlogloss:1.04406 [4] train-mlogloss:1.03934 eval-mlogloss:1.03582 [5] train-mlogloss:1.03095 eval-mlogloss:1.02752 [6] train-mlogloss:1.02199 eval-mlogloss:1.01928 [7] train-mlogloss:1.00780 eval-mlogloss:1.00399 [8] train-mlogloss:0.99556 eval-mlogloss:0.99164 [9] train-mlogloss:0.99104 eval-mlogloss:0.98798 [10] train-mlogloss:0.97717 eval-mlogloss:0.97288 [11] train-mlogloss:0.96689 eval-mlogloss:0.96204 [12] train-mlogloss:0.95486 eval-mlogloss:0.94840 [13] train-mlogloss:0.94502 eval-mlogloss:0.93742 [14] train-mlogloss:0.92919 eval-mlogloss:0.92048 [15] train-mlogloss:0.92107 eval-mlogloss:0.91151
[I 2024-09-27 22:42:34,395] Trial 41 pruned. Trial was pruned at iteration 16.
[0] train-mlogloss:1.07593 eval-mlogloss:1.07461 [1] train-mlogloss:1.05621 eval-mlogloss:1.05487 [2] train-mlogloss:1.03867 eval-mlogloss:1.03465 [3] train-mlogloss:1.01731 eval-mlogloss:1.01214
[I 2024-09-27 22:42:34,576] Trial 42 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:1.07016 eval-mlogloss:1.06963 [1] train-mlogloss:1.04623 eval-mlogloss:1.04581 [2] train-mlogloss:1.02742 eval-mlogloss:1.02665 [3] train-mlogloss:1.00146 eval-mlogloss:0.99902 [4] train-mlogloss:0.98681 eval-mlogloss:0.98641
[I 2024-09-27 22:42:34,740] Trial 43 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:1.02368 eval-mlogloss:1.01920 [1] train-mlogloss:0.95579 eval-mlogloss:0.94940
[I 2024-09-27 22:42:34,876] Trial 44 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:1.04117 eval-mlogloss:1.04215 [1] train-mlogloss:0.99404 eval-mlogloss:1.00063 [2] train-mlogloss:0.95391 eval-mlogloss:0.96059 [3] train-mlogloss:0.90661 eval-mlogloss:0.91358
[I 2024-09-27 22:42:35,032] Trial 45 pruned. Trial was pruned at iteration 4.
[0] train-mlogloss:0.97689 eval-mlogloss:0.96935 [1] train-mlogloss:0.88919 eval-mlogloss:0.88276
[I 2024-09-27 22:42:35,161] Trial 46 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.82996 eval-mlogloss:0.81187
[I 2024-09-27 22:42:35,294] Trial 47 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:1.00629 eval-mlogloss:1.00163
[I 2024-09-27 22:42:35,432] Trial 48 pruned. Trial was pruned at iteration 1.
[0] train-mlogloss:0.96061 eval-mlogloss:0.94935
[I 2024-09-27 22:42:35,572] Trial 49 pruned. Trial was pruned at iteration 1.
Best trial: {'lambda': 2.0969304028350764e-06, 'alpha': 0.023325358760680433, 'eta': 0.14810100269032767, 'gamma': 0.22514175819708912, 'max_depth': 8, 'min_child_weight': 6, 'subsample': 0.9866425347455645, 'colsample_bytree': 0.4919084310855477}
Best accuracy: 1.0
In [ ]:
In [34]:
! pip install optuna-integration[xgboost]
Defaulting to user installation because normal site-packages is not writeable Collecting optuna-integration[xgboost] Downloading optuna_integration-4.0.0-py3-none-any.whl.metadata (11 kB) Requirement already satisfied: optuna in c:\users\muhammad rasoul\appdata\roaming\python\python311\site-packages (from optuna-integration[xgboost]) (4.0.0) Requirement already satisfied: xgboost in c:\programdata\anaconda3\lib\site-packages (from optuna-integration[xgboost]) (1.6.2) Requirement already satisfied: alembic>=1.5.0 in c:\users\muhammad rasoul\appdata\roaming\python\python311\site-packages (from optuna->optuna-integration[xgboost]) (1.13.1) Requirement already satisfied: colorlog in c:\users\muhammad rasoul\appdata\roaming\python\python311\site-packages (from optuna->optuna-integration[xgboost]) (6.8.2) Requirement already satisfied: numpy in c:\users\muhammad rasoul\appdata\roaming\python\python311\site-packages (from optuna->optuna-integration[xgboost]) (1.24.4) Requirement already satisfied: packaging>=20.0 in c:\programdata\anaconda3\lib\site-packages (from optuna->optuna-integration[xgboost]) (24.1) Requirement already satisfied: sqlalchemy>=1.3.0 in c:\programdata\anaconda3\lib\site-packages (from optuna->optuna-integration[xgboost]) (2.0.30) Requirement already satisfied: tqdm in c:\programdata\anaconda3\lib\site-packages (from optuna->optuna-integration[xgboost]) (4.66.4) Requirement already satisfied: PyYAML in c:\programdata\anaconda3\lib\site-packages (from optuna->optuna-integration[xgboost]) (6.0.1) Requirement already satisfied: scipy in c:\programdata\anaconda3\lib\site-packages (from xgboost->optuna-integration[xgboost]) (1.13.1) Requirement already satisfied: Mako in c:\users\muhammad rasoul\appdata\roaming\python\python311\site-packages (from alembic>=1.5.0->optuna->optuna-integration[xgboost]) (1.3.5) Requirement already satisfied: typing-extensions>=4 in c:\users\muhammad rasoul\appdata\roaming\python\python311\site-packages (from alembic>=1.5.0->optuna->optuna-integration[xgboost]) (4.11.0) Requirement already satisfied: greenlet!=0.4.17 in c:\programdata\anaconda3\lib\site-packages (from sqlalchemy>=1.3.0->optuna->optuna-integration[xgboost]) (3.0.1) Requirement already satisfied: colorama in c:\programdata\anaconda3\lib\site-packages (from colorlog->optuna->optuna-integration[xgboost]) (0.4.6) Requirement already satisfied: MarkupSafe>=0.9.2 in c:\programdata\anaconda3\lib\site-packages (from Mako->alembic>=1.5.0->optuna->optuna-integration[xgboost]) (2.1.3) Downloading optuna_integration-4.0.0-py3-none-any.whl (96 kB) ---------------------------------------- 0.0/96.9 kB ? eta -:--:-- ---------------------------------------- 0.0/96.9 kB ? eta -:--:-- ---------------------------------------- 0.0/96.9 kB ? eta -:--:-- ---------------------------------------- 0.0/96.9 kB ? eta -:--:-- ---- ----------------------------------- 10.2/96.9 kB ? eta -:--:-- ------------ --------------------------- 30.7/96.9 kB 330.3 kB/s eta 0:00:01 ------------ --------------------------- 30.7/96.9 kB 330.3 kB/s eta 0:00:01 ---------------- ----------------------- 41.0/96.9 kB 245.8 kB/s eta 0:00:01 ---------------- ----------------------- 41.0/96.9 kB 245.8 kB/s eta 0:00:01 ------------------------- -------------- 61.4/96.9 kB 233.8 kB/s eta 0:00:01 ----------------------------- ---------- 71.7/96.9 kB 206.9 kB/s eta 0:00:01 ----------------------------- ---------- 71.7/96.9 kB 206.9 kB/s eta 0:00:01 -------------------------------------- - 92.2/96.9 kB 218.5 kB/s eta 0:00:01 ---------------------------------------- 96.9/96.9 kB 205.4 kB/s eta 0:00:00 Installing collected packages: optuna-integration Successfully installed optuna-integration-4.0.0
WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name'
In [37]:
from optuna.visualization import plot_intermediate_values
# 1. Plot intermediate values during the trials
plot_intermediate_values(study).show()
In [35]:
#!pip install xgboost
Defaulting to user installation because normal site-packages is not writeable Requirement already satisfied: xgboost in c:\programdata\anaconda3\lib\site-packages (1.6.2) Requirement already satisfied: numpy in c:\users\muhammad rasoul\appdata\roaming\python\python311\site-packages (from xgboost) (1.24.4) Requirement already satisfied: scipy in c:\programdata\anaconda3\lib\site-packages (from xgboost) (1.13.1)
WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name' WARNING: Skipping C:\ProgramData\anaconda3\Lib\site-packages\numpy-1.24.3.dist-info due to invalid metadata entry 'name'